lang
stringclasses
2 values
license
stringclasses
13 values
stderr
stringlengths
0
343
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
6
87.7k
new_contents
stringlengths
0
6.23M
new_file
stringlengths
3
311
old_contents
stringlengths
0
6.23M
message
stringlengths
6
9.1k
old_file
stringlengths
3
311
subject
stringlengths
0
4k
git_diff
stringlengths
0
6.31M
Java
mit
1a3bc509f9c4de02d1deff0872fca6cb8ebe7720
0
Bimde/Blackjack-Server
package gameplay; import java.util.ArrayList; import java.util.Collections; /** * Deck object, contains all the decks of cards to be used. */ public class Deck { private int numOfDecks; private ArrayList<Card> cards; private static final int SHUFFLE_REPEAT = 10; /** * Constructs a new Deck object. Sets the number of decks to the class * variable number of decks. Loads up the deck. * * @param numOfDecks * the number of decks to be used in the game. */ public Deck(int numOfDecks) { this.numOfDecks = numOfDecks; this.cards = new ArrayList<Card>(); this.reloadDeck(); } /** * Loops through the number of decks and adds the according number of cards * to the deck. */ public void reloadDeck() { // Clear the deck first to make things easier this.cards.clear(); // For the number of decks required, go through the number of suits and // add each rank for each suit for (int i = 0; i < this.numOfDecks; i++) { for (int suit = 0; suit < Dealer.SUITS.length; suit++) { for (int rank = 0; rank < Dealer.RANKS.length; rank++) { this.cards.add(new Card(Dealer.SUITS[suit], Dealer.RANKS[rank])); } } } // Shuffle the deck a random number of times to prevent seed // determination from clients int rand = (int) (Math.random() * SHUFFLE_REPEAT + 1); for (int i = 0; i < rand; i++) this.shuffle(); } /** * Shuffles the deck. */ public void shuffle() { Collections.shuffle(this.cards); } public int size() { return this.cards.size(); } /** * Removes the top card from the deck. * * @return the card removed from the deck. */ public Card getCard() { Card card = this.cards.get(0); this.cards.remove(0); return card; } }
src/gameplay/Deck.java
package gameplay; import java.util.ArrayList; import java.util.Collections; /** * Deck object, contains all the decks of cards to be used. */ public class Deck { private int numOfDecks; private ArrayList<Card> cards; /** * Constructs a new Deck object. Sets the number of decks to the class * variable number of decks. Loads up the deck. * * @param numOfDecks * the number of decks to be used in the game. */ public Deck(int numOfDecks) { this.numOfDecks = numOfDecks; this.cards = new ArrayList<Card>(); this.reloadDeck(); } /** * Loops through the number of decks and adds the according number of cards * to the deck. */ public void reloadDeck() { // Clear the deck first to make things easier this.cards.clear(); // For the number of decks required, go through the number of suits and // add each rank for each suit for (int i = 0; i < this.numOfDecks; i++) { for (int suit = 0; suit < Dealer.SUITS.length; suit++) { for (int rank = 0; rank < Dealer.RANKS.length; rank++) { this.cards.add(new Card(Dealer.SUITS[suit], Dealer.RANKS[rank])); } } } // Shuffle the deck this.shuffle(); } /** * Shuffles the deck. */ public void shuffle() { Collections.shuffle(this.cards); } public int size() { return this.cards.size(); } /** * Removes the top card from the deck. * * @return the card removed from the deck. */ public Card getCard() { Card card = this.cards.get(0); this.cards.remove(0); return card; } }
Hotfix for client seed guessing See https://github.com/Bimde/Blackjack-Server/issues/40
src/gameplay/Deck.java
Hotfix for client seed guessing
<ide><path>rc/gameplay/Deck.java <ide> public class Deck { <ide> private int numOfDecks; <ide> private ArrayList<Card> cards; <add> private static final int SHUFFLE_REPEAT = 10; <ide> <ide> /** <ide> * Constructs a new Deck object. Sets the number of decks to the class <ide> } <ide> } <ide> <del> // Shuffle the deck <del> this.shuffle(); <add> // Shuffle the deck a random number of times to prevent seed <add> // determination from clients <add> int rand = (int) (Math.random() * SHUFFLE_REPEAT + 1); <add> for (int i = 0; i < rand; i++) <add> this.shuffle(); <ide> } <ide> <ide> /**
Java
apache-2.0
d33503042e8296b0b540abcac7052ced750d1db1
0
apache/commons-lang,apache/commons-lang,apache/commons-lang
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3.time; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import java.text.DateFormatSymbols; import java.text.ParseException; import java.text.ParsePosition; import java.util.ArrayList; import java.util.Calendar; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.ListIterator; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.LocaleUtils; /** * <p>FastDateParser is a fast and thread-safe version of * {@link java.text.SimpleDateFormat}.</p> * * <p>To obtain a proxy to a FastDateParser, use {@link FastDateFormat#getInstance(String, TimeZone, Locale)} * or another variation of the factory methods of {@link FastDateFormat}.</p> * * <p>Since FastDateParser is thread safe, you can use a static member instance:</p> * <code> * private static final DateParser DATE_PARSER = FastDateFormat.getInstance("yyyy-MM-dd"); * 
</code> * * <p>This class can be used as a direct replacement for * {@code SimpleDateFormat} in most parsing situations. * This class is especially useful in multi-threaded server environments. * {@code SimpleDateFormat} is not thread-safe in any JDK version, * nor will it be as Sun has closed the * <a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4228335">bug</a>/RFE. * </p> * * <p>Only parsing is supported by this class, but all patterns are compatible with * SimpleDateFormat.</p> * * <p>The class operates in lenient mode, so for example a time of 90 minutes is treated as 1 hour 30 minutes.</p> * * <p>Timing tests indicate this class is as about as fast as SimpleDateFormat * in single thread applications and about 25% faster in multi-thread applications.</p> * * @since 3.2 * @see FastDatePrinter */ public class FastDateParser implements DateParser, Serializable { /** * Required for serialization support. * * @see java.io.Serializable */ private static final long serialVersionUID = 3L; static final Locale JAPANESE_IMPERIAL = new Locale("ja", "JP", "JP"); // defining fields private final String pattern; private final TimeZone timeZone; private final Locale locale; private final int century; private final int startYear; // derived fields private transient List<StrategyAndWidth> patterns; // comparator used to sort regex alternatives // alternatives should be ordered longer first, and shorter last. ('february' before 'feb') // all entries must be lowercase by locale. private static final Comparator<String> LONGER_FIRST_LOWERCASE = Comparator.reverseOrder(); /** * <p>Constructs a new FastDateParser.</p> * * Use {@link FastDateFormat#getInstance(String, TimeZone, Locale)} or another variation of the * factory methods of {@link FastDateFormat} to get a cached FastDateParser instance. 
* * @param pattern non-null {@link java.text.SimpleDateFormat} compatible * pattern * @param timeZone non-null time zone to use * @param locale non-null locale */ protected FastDateParser(final String pattern, final TimeZone timeZone, final Locale locale) { this(pattern, timeZone, locale, null); } /** * <p>Constructs a new FastDateParser.</p> * * @param pattern non-null {@link java.text.SimpleDateFormat} compatible * pattern * @param timeZone non-null time zone to use * @param locale non-null locale * @param centuryStart The start of the century for 2 digit year parsing * * @since 3.5 */ protected FastDateParser(final String pattern, final TimeZone timeZone, final Locale locale, final Date centuryStart) { this.pattern = pattern; this.timeZone = timeZone; this.locale = LocaleUtils.toLocale(locale); final Calendar definingCalendar = Calendar.getInstance(timeZone, this.locale); final int centuryStartYear; if (centuryStart != null) { definingCalendar.setTime(centuryStart); centuryStartYear = definingCalendar.get(Calendar.YEAR); } else if (this.locale.equals(JAPANESE_IMPERIAL)) { centuryStartYear = 0; } else { // from 80 years ago to 20 years from now definingCalendar.setTime(new Date()); centuryStartYear = definingCalendar.get(Calendar.YEAR) - 80; } century = centuryStartYear / 100 * 100; startYear = centuryStartYear - century; init(definingCalendar); } /** * Initializes derived fields from defining fields. 
* This is called from constructor and from readObject (de-serialization) * * @param definingCalendar the {@link java.util.Calendar} instance used to initialize this FastDateParser */ private void init(final Calendar definingCalendar) { patterns = new ArrayList<>(); final StrategyParser fm = new StrategyParser(definingCalendar); for (;;) { final StrategyAndWidth field = fm.getNextStrategy(); if (field == null) { break; } patterns.add(field); } } // helper classes to parse the format string //----------------------------------------------------------------------- /** * Holds strategy and field width */ private static class StrategyAndWidth { final Strategy strategy; final int width; StrategyAndWidth(final Strategy strategy, final int width) { this.strategy = strategy; this.width = width; } int getMaxWidth(final ListIterator<StrategyAndWidth> lt) { if (!strategy.isNumber() || !lt.hasNext()) { return 0; } final Strategy nextStrategy = lt.next().strategy; lt.previous(); return nextStrategy.isNumber() ? 
width : 0; } @Override public String toString() { return "StrategyAndWidth [strategy=" + strategy + ", width=" + width + "]"; } } /** * Parse format into Strategies */ private class StrategyParser { private final Calendar definingCalendar; private int currentIdx; StrategyParser(final Calendar definingCalendar) { this.definingCalendar = definingCalendar; } StrategyAndWidth getNextStrategy() { if (currentIdx >= pattern.length()) { return null; } final char c = pattern.charAt(currentIdx); if (isFormatLetter(c)) { return letterPattern(c); } return literal(); } private StrategyAndWidth letterPattern(final char c) { final int begin = currentIdx; while (++currentIdx < pattern.length()) { if (pattern.charAt(currentIdx) != c) { break; } } final int width = currentIdx - begin; return new StrategyAndWidth(getStrategy(c, width, definingCalendar), width); } private StrategyAndWidth literal() { boolean activeQuote = false; final StringBuilder sb = new StringBuilder(); while (currentIdx < pattern.length()) { final char c = pattern.charAt(currentIdx); if (!activeQuote && isFormatLetter(c)) { break; } else if (c == '\'' && (++currentIdx == pattern.length() || pattern.charAt(currentIdx) != '\'')) { activeQuote = !activeQuote; continue; } ++currentIdx; sb.append(c); } if (activeQuote) { throw new IllegalArgumentException("Unterminated quote"); } final String formatField = sb.toString(); return new StrategyAndWidth(new CopyQuotedStrategy(formatField), formatField.length()); } } private static boolean isFormatLetter(final char c) { return c >= 'A' && c <= 'Z' || c >= 'a' && c <= 'z'; } // Accessors //----------------------------------------------------------------------- /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#getPattern() */ @Override public String getPattern() { return pattern; } /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#getTimeZone() */ @Override public TimeZone getTimeZone() { return timeZone; } /* (non-Javadoc) * @see 
org.apache.commons.lang3.time.DateParser#getLocale() */ @Override public Locale getLocale() { return locale; } // Basics //----------------------------------------------------------------------- /** * <p>Compares another object for equality with this object.</p> * * @param obj the object to compare to * @return {@code true}if equal to this instance */ @Override public boolean equals(final Object obj) { if (!(obj instanceof FastDateParser)) { return false; } final FastDateParser other = (FastDateParser) obj; return pattern.equals(other.pattern) && timeZone.equals(other.timeZone) && locale.equals(other.locale); } /** * <p>Returns a hash code compatible with equals.</p> * * @return a hash code compatible with equals */ @Override public int hashCode() { return pattern.hashCode() + 13 * (timeZone.hashCode() + 13 * locale.hashCode()); } /** * <p>Gets a string version of this formatter.</p> * * @return a debugging string */ @Override public String toString() { return "FastDateParser[" + pattern + "," + locale + "," + timeZone.getID() + "]"; } // Serializing //----------------------------------------------------------------------- /** * Creates the object after serialization. This implementation reinitializes the * transient properties. * * @param in ObjectInputStream from which the object is being deserialized. * @throws IOException if there is an IO issue. * @throws ClassNotFoundException if a class cannot be found. 
*/ private void readObject(final ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); final Calendar definingCalendar = Calendar.getInstance(timeZone, locale); init(definingCalendar); } /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#parseObject(java.lang.String) */ @Override public Object parseObject(final String source) throws ParseException { return parse(source); } /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#parse(java.lang.String) */ @Override public Date parse(final String source) throws ParseException { final ParsePosition pp = new ParsePosition(0); final Date date = parse(source, pp); if (date == null) { // Add a note re supported date range if (locale.equals(JAPANESE_IMPERIAL)) { throw new ParseException("(The " + locale + " locale does not support dates before 1868 AD)\n" + "Unparseable date: \"" + source, pp.getErrorIndex()); } throw new ParseException("Unparseable date: " + source, pp.getErrorIndex()); } return date; } /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#parseObject(java.lang.String, java.text.ParsePosition) */ @Override public Object parseObject(final String source, final ParsePosition pos) { return parse(source, pos); } /** * This implementation updates the ParsePosition if the parse succeeds. * However, it sets the error index to the position before the failed field unlike * the method {@link java.text.SimpleDateFormat#parse(String, ParsePosition)} which sets * the error index to after the failed field. * <p> * To determine if the parse has succeeded, the caller must check if the current parse position * given by {@link ParsePosition#getIndex()} has been updated. If the input buffer has been fully * parsed, then the index will point to just after the end of the input buffer. 
* * @see org.apache.commons.lang3.time.DateParser#parse(java.lang.String, java.text.ParsePosition) */ @Override public Date parse(final String source, final ParsePosition pos) { // timing tests indicate getting new instance is 19% faster than cloning final Calendar cal = Calendar.getInstance(timeZone, locale); cal.clear(); return parse(source, pos, cal) ? cal.getTime() : null; } /** * Parses a formatted date string according to the format. Updates the Calendar with parsed fields. * Upon success, the ParsePosition index is updated to indicate how much of the source text was consumed. * Not all source text needs to be consumed. Upon parse failure, ParsePosition error index is updated to * the offset of the source text which does not match the supplied format. * * @param source The text to parse. * @param pos On input, the position in the source to start parsing, on output, updated position. * @param calendar The calendar into which to set parsed fields. * @return true, if source has been parsed (pos parsePosition is updated); otherwise false (and pos errorIndex is updated) * @throws IllegalArgumentException when Calendar has been set to be not lenient, and a parsed field is * out of range. 
*/ @Override public boolean parse(final String source, final ParsePosition pos, final Calendar calendar) { final ListIterator<StrategyAndWidth> lt = patterns.listIterator(); while (lt.hasNext()) { final StrategyAndWidth strategyAndWidth = lt.next(); final int maxWidth = strategyAndWidth.getMaxWidth(lt); if (!strategyAndWidth.strategy.parse(this, calendar, source, pos, maxWidth)) { return false; } } return true; } // Support for strategies //----------------------------------------------------------------------- private static StringBuilder simpleQuote(final StringBuilder sb, final String value) { for (int i = 0; i < value.length(); ++i) { final char c = value.charAt(i); switch (c) { case '\\': case '^': case '$': case '.': case '|': case '?': case '*': case '+': case '(': case ')': case '[': case '{': sb.append('\\'); default: sb.append(c); } } if (sb.charAt(sb.length() - 1) == '.') { // trailing '.' is optional sb.append('?'); } return sb; } /** * Gets the short and long values displayed for a field * @param cal The calendar to obtain the short and long values * @param locale The locale of display names * @param field The field of interest * @param regex The regular expression to build * @return The map of string display names to field values */ private static Map<String, Integer> appendDisplayNames(final Calendar cal, Locale locale, final int field, final StringBuilder regex) { final Map<String, Integer> values = new HashMap<>(); locale = LocaleUtils.toLocale(locale); final Map<String, Integer> displayNames = cal.getDisplayNames(field, Calendar.ALL_STYLES, locale); final TreeSet<String> sorted = new TreeSet<>(LONGER_FIRST_LOWERCASE); for (final Map.Entry<String, Integer> displayName : displayNames.entrySet()) { final String key = displayName.getKey().toLowerCase(locale); if (sorted.add(key)) { values.put(key, displayName.getValue()); } } for (final String symbol : sorted) { simpleQuote(regex, symbol).append('|'); } return values; } /** * Adjusts dates to be 
within appropriate century * @param twoDigitYear The year to adjust * @return A value between centuryStart(inclusive) to centuryStart+100(exclusive) */ private int adjustYear(final int twoDigitYear) { final int trial = century + twoDigitYear; return twoDigitYear >= startYear ? trial : trial + 100; } /** * A strategy to parse a single field from the parsing pattern */ private abstract static class Strategy { /** * Is this field a number? The default implementation returns false. * * @return true, if field is a number */ boolean isNumber() { return false; } abstract boolean parse(FastDateParser parser, Calendar calendar, String source, ParsePosition pos, int maxWidth); } /** * A strategy to parse a single field from the parsing pattern */ private abstract static class PatternStrategy extends Strategy { Pattern pattern; void createPattern(final StringBuilder regex) { createPattern(regex.toString()); } void createPattern(final String regex) { this.pattern = Pattern.compile(regex); } /** * Is this field a number? The default implementation returns false. * * @return true, if field is a number */ @Override boolean isNumber() { return false; } @Override boolean parse(final FastDateParser parser, final Calendar calendar, final String source, final ParsePosition pos, final int maxWidth) { final Matcher matcher = pattern.matcher(source.substring(pos.getIndex())); if (!matcher.lookingAt()) { pos.setErrorIndex(pos.getIndex()); return false; } pos.setIndex(pos.getIndex() + matcher.end(1)); setCalendar(parser, calendar, matcher.group(1)); return true; } abstract void setCalendar(FastDateParser parser, Calendar cal, String value); /** * Converts this instance to a handy debug string. 
* * @since 3.12.0 */ @Override public String toString() { return getClass().getSimpleName() + " [pattern=" + pattern + "]"; } } /** * Gets a Strategy given a field from a SimpleDateFormat pattern * @param f A sub-sequence of the SimpleDateFormat pattern * @param definingCalendar The calendar to obtain the short and long values * @return The Strategy that will handle parsing for the field */ private Strategy getStrategy(final char f, final int width, final Calendar definingCalendar) { switch (f) { default: throw new IllegalArgumentException("Format '" + f + "' not supported"); case 'D': return DAY_OF_YEAR_STRATEGY; case 'E': return getLocaleSpecificStrategy(Calendar.DAY_OF_WEEK, definingCalendar); case 'F': return DAY_OF_WEEK_IN_MONTH_STRATEGY; case 'G': return getLocaleSpecificStrategy(Calendar.ERA, definingCalendar); case 'H': // Hour in day (0-23) return HOUR_OF_DAY_STRATEGY; case 'K': // Hour in am/pm (0-11) return HOUR_STRATEGY; case 'M': return width >= 3 ? getLocaleSpecificStrategy(Calendar.MONTH, definingCalendar) : NUMBER_MONTH_STRATEGY; case 'S': return MILLISECOND_STRATEGY; case 'W': return WEEK_OF_MONTH_STRATEGY; case 'a': return getLocaleSpecificStrategy(Calendar.AM_PM, definingCalendar); case 'd': return DAY_OF_MONTH_STRATEGY; case 'h': // Hour in am/pm (1-12), i.e. midday/midnight is 12, not 0 return HOUR12_STRATEGY; case 'k': // Hour in day (1-24), i.e. midnight is 24, not 0 return HOUR24_OF_DAY_STRATEGY; case 'm': return MINUTE_STRATEGY; case 's': return SECOND_STRATEGY; case 'u': return DAY_OF_WEEK_STRATEGY; case 'w': return WEEK_OF_YEAR_STRATEGY; case 'y': case 'Y': return width > 2 ? 
LITERAL_YEAR_STRATEGY : ABBREVIATED_YEAR_STRATEGY; case 'X': return ISO8601TimeZoneStrategy.getStrategy(width); case 'Z': if (width == 2) { return ISO8601TimeZoneStrategy.ISO_8601_3_STRATEGY; } //$FALL-THROUGH$ case 'z': return getLocaleSpecificStrategy(Calendar.ZONE_OFFSET, definingCalendar); } } @SuppressWarnings("unchecked") // OK because we are creating an array with no entries private static final ConcurrentMap<Locale, Strategy>[] caches = new ConcurrentMap[Calendar.FIELD_COUNT]; /** * Gets a cache of Strategies for a particular field * @param field The Calendar field * @return a cache of Locale to Strategy */ private static ConcurrentMap<Locale, Strategy> getCache(final int field) { synchronized (caches) { if (caches[field] == null) { caches[field] = new ConcurrentHashMap<>(3); } return caches[field]; } } /** * Constructs a Strategy that parses a Text field * @param field The Calendar field * @param definingCalendar The calendar to obtain the short and long values * @return a TextStrategy for the field and Locale */ private Strategy getLocaleSpecificStrategy(final int field, final Calendar definingCalendar) { final ConcurrentMap<Locale, Strategy> cache = getCache(field); Strategy strategy = cache.get(locale); if (strategy == null) { strategy = field == Calendar.ZONE_OFFSET ? 
new TimeZoneStrategy(locale) : new CaseInsensitiveTextStrategy(field, definingCalendar, locale); final Strategy inCache = cache.putIfAbsent(locale, strategy); if (inCache != null) { return inCache; } } return strategy; } /** * A strategy that copies the static or quoted field in the parsing pattern */ private static class CopyQuotedStrategy extends Strategy { private final String formatField; /** * Constructs a Strategy that ensures the formatField has literal text * * @param formatField The literal text to match */ CopyQuotedStrategy(final String formatField) { this.formatField = formatField; } /** * {@inheritDoc} */ @Override boolean isNumber() { return false; } @Override boolean parse(final FastDateParser parser, final Calendar calendar, final String source, final ParsePosition pos, final int maxWidth) { for (int idx = 0; idx < formatField.length(); ++idx) { final int sIdx = idx + pos.getIndex(); if (sIdx == source.length()) { pos.setErrorIndex(sIdx); return false; } if (formatField.charAt(idx) != source.charAt(sIdx)) { pos.setErrorIndex(sIdx); return false; } } pos.setIndex(formatField.length() + pos.getIndex()); return true; } /** * Converts this instance to a handy debug string. 
* * @since 3.12.0 */ @Override public String toString() { return "CopyQuotedStrategy [formatField=" + formatField + "]"; } } /** * A strategy that handles a text field in the parsing pattern */ private static class CaseInsensitiveTextStrategy extends PatternStrategy { private final int field; final Locale locale; private final Map<String, Integer> lKeyValues; /** * Constructs a Strategy that parses a Text field * * @param field The Calendar field * @param definingCalendar The Calendar to use * @param locale The Locale to use */ CaseInsensitiveTextStrategy(final int field, final Calendar definingCalendar, final Locale locale) { this.field = field; this.locale = LocaleUtils.toLocale(locale); final StringBuilder regex = new StringBuilder(); regex.append("((?iu)"); lKeyValues = appendDisplayNames(definingCalendar, locale, field, regex); regex.setLength(regex.length() - 1); regex.append(")"); createPattern(regex); } /** * {@inheritDoc} */ @Override void setCalendar(final FastDateParser parser, final Calendar calendar, final String value) { final String lowerCase = value.toLowerCase(locale); Integer iVal = lKeyValues.get(lowerCase); if (iVal == null) { // match missing the optional trailing period iVal = lKeyValues.get(lowerCase + '.'); } calendar.set(field, iVal.intValue()); } /** * Converts this instance to a handy debug string. 
* * @since 3.12.0 */ @Override public String toString() { return "CaseInsensitiveTextStrategy [field=" + field + ", locale=" + locale + ", lKeyValues=" + lKeyValues + ", pattern=" + pattern + "]"; } } /** * A strategy that handles a number field in the parsing pattern */ private static class NumberStrategy extends Strategy { private final int field; /** * Constructs a Strategy that parses a Number field * * @param field The Calendar field */ NumberStrategy(final int field) { this.field = field; } /** * {@inheritDoc} */ @Override boolean isNumber() { return true; } @Override boolean parse(final FastDateParser parser, final Calendar calendar, final String source, final ParsePosition pos, final int maxWidth) { int idx = pos.getIndex(); int last = source.length(); if (maxWidth == 0) { // if no maxWidth, strip leading white space for (; idx < last; ++idx) { final char c = source.charAt(idx); if (!Character.isWhitespace(c)) { break; } } pos.setIndex(idx); } else { final int end = idx + maxWidth; if (last > end) { last = end; } } for (; idx < last; ++idx) { final char c = source.charAt(idx); if (!Character.isDigit(c)) { break; } } if (pos.getIndex() == idx) { pos.setErrorIndex(idx); return false; } final int value = Integer.parseInt(source.substring(pos.getIndex(), idx)); pos.setIndex(idx); calendar.set(field, modify(parser, value)); return true; } /** * Make any modifications to parsed integer * * @param parser The parser * @param iValue The parsed integer * @return The modified value */ int modify(final FastDateParser parser, final int iValue) { return iValue; } /** * Converts this instance to a handy debug string. * * @since 3.12.0 */ @Override public String toString() { return "NumberStrategy [field=" + field + "]"; } } private static final Strategy ABBREVIATED_YEAR_STRATEGY = new NumberStrategy(Calendar.YEAR) { /** * {@inheritDoc} */ @Override int modify(final FastDateParser parser, final int iValue) { return iValue < 100 ? 
parser.adjustYear(iValue) : iValue; } }; /** * A strategy that handles a time zone field in the parsing pattern */ static class TimeZoneStrategy extends PatternStrategy { private static final String RFC_822_TIME_ZONE = "[+-]\\d{4}"; private static final String GMT_OPTION = TimeZones.GMT_ID + "[+-]\\d{1,2}:\\d{2}"; private final Locale locale; private final Map<String, TzInfo> tzNames = new HashMap<>(); private static class TzInfo { final TimeZone zone; final int dstOffset; TzInfo(final TimeZone tz, final boolean useDst) { zone = tz; dstOffset = useDst ? tz.getDSTSavings() : 0; } } /** * Index of zone id */ private static final int ID = 0; /** * Constructs a Strategy that parses a TimeZone * * @param locale The Locale */ TimeZoneStrategy(final Locale locale) { this.locale = LocaleUtils.toLocale(locale); final StringBuilder sb = new StringBuilder(); sb.append("((?iu)" + RFC_822_TIME_ZONE + "|" + GMT_OPTION); final Set<String> sorted = new TreeSet<>(LONGER_FIRST_LOWERCASE); final String[][] zones = DateFormatSymbols.getInstance(locale).getZoneStrings(); for (final String[] zoneNames : zones) { // offset 0 is the time zone ID and is not localized final String tzId = zoneNames[ID]; if (tzId.equalsIgnoreCase(TimeZones.GMT_ID)) { continue; } final TimeZone tz = TimeZone.getTimeZone(tzId); // offset 1 is long standard name // offset 2 is short standard name final TzInfo standard = new TzInfo(tz, false); TzInfo tzInfo = standard; for (int i = 1; i < zoneNames.length; ++i) { switch (i) { case 3: // offset 3 is long daylight savings (or summertime) name // offset 4 is the short summertime name tzInfo = new TzInfo(tz, true); break; case 5: // offset 5 starts additional names, probably standard time tzInfo = standard; break; default: break; } if (zoneNames[i] != null) { final String key = zoneNames[i].toLowerCase(locale); // ignore the data associated with duplicates supplied in // the additional names if (sorted.add(key)) { tzNames.put(key, tzInfo); } } } } // order the regex 
alternatives with longer strings first, greedy // match will ensure longest string will be consumed for (final String zoneName : sorted) { simpleQuote(sb.append('|'), zoneName); } sb.append(")"); createPattern(sb); } /** * {@inheritDoc} */ @Override void setCalendar(final FastDateParser parser, final Calendar cal, final String timeZone) { final TimeZone tz = FastTimeZone.getGmtTimeZone(timeZone); if (tz != null) { cal.setTimeZone(tz); } else { final String lowerCase = timeZone.toLowerCase(locale); TzInfo tzInfo = tzNames.get(lowerCase); if (tzInfo == null) { // match missing the optional trailing period tzInfo = tzNames.get(lowerCase + '.'); } cal.set(Calendar.DST_OFFSET, tzInfo.dstOffset); cal.set(Calendar.ZONE_OFFSET, tzInfo.zone.getRawOffset()); } } /** * Converts this instance to a handy debug string. * * @since 3.12.0 */ @Override public String toString() { return "TimeZoneStrategy [locale=" + locale + ", tzNames=" + tzNames + ", pattern=" + pattern + "]"; } } private static class ISO8601TimeZoneStrategy extends PatternStrategy { // Z, +hh, -hh, +hhmm, -hhmm, +hh:mm or -hh:mm /** * Constructs a Strategy that parses a TimeZone * @param pattern The Pattern */ ISO8601TimeZoneStrategy(final String pattern) { createPattern(pattern); } /** * {@inheritDoc} */ @Override void setCalendar(final FastDateParser parser, final Calendar cal, final String value) { cal.setTimeZone(FastTimeZone.getGmtTimeZone(value)); } private static final Strategy ISO_8601_1_STRATEGY = new ISO8601TimeZoneStrategy("(Z|(?:[+-]\\d{2}))"); private static final Strategy ISO_8601_2_STRATEGY = new ISO8601TimeZoneStrategy("(Z|(?:[+-]\\d{2}\\d{2}))"); private static final Strategy ISO_8601_3_STRATEGY = new ISO8601TimeZoneStrategy("(Z|(?:[+-]\\d{2}(?::)\\d{2}))"); /** * Factory method for ISO8601TimeZoneStrategies. * * @param tokenLen a token indicating the length of the TimeZone String to be formatted. * @return a ISO8601TimeZoneStrategy that can format TimeZone String of length {@code tokenLen}. 
If no such * strategy exists, an IllegalArgumentException will be thrown. */ static Strategy getStrategy(final int tokenLen) { switch(tokenLen) { case 1: return ISO_8601_1_STRATEGY; case 2: return ISO_8601_2_STRATEGY; case 3: return ISO_8601_3_STRATEGY; default: throw new IllegalArgumentException("invalid number of X"); } } } private static final Strategy NUMBER_MONTH_STRATEGY = new NumberStrategy(Calendar.MONTH) { @Override int modify(final FastDateParser parser, final int iValue) { return iValue-1; } }; private static final Strategy LITERAL_YEAR_STRATEGY = new NumberStrategy(Calendar.YEAR); private static final Strategy WEEK_OF_YEAR_STRATEGY = new NumberStrategy(Calendar.WEEK_OF_YEAR); private static final Strategy WEEK_OF_MONTH_STRATEGY = new NumberStrategy(Calendar.WEEK_OF_MONTH); private static final Strategy DAY_OF_YEAR_STRATEGY = new NumberStrategy(Calendar.DAY_OF_YEAR); private static final Strategy DAY_OF_MONTH_STRATEGY = new NumberStrategy(Calendar.DAY_OF_MONTH); private static final Strategy DAY_OF_WEEK_STRATEGY = new NumberStrategy(Calendar.DAY_OF_WEEK) { @Override int modify(final FastDateParser parser, final int iValue) { return iValue == 7 ? Calendar.SUNDAY : iValue + 1; } }; private static final Strategy DAY_OF_WEEK_IN_MONTH_STRATEGY = new NumberStrategy(Calendar.DAY_OF_WEEK_IN_MONTH); private static final Strategy HOUR_OF_DAY_STRATEGY = new NumberStrategy(Calendar.HOUR_OF_DAY); private static final Strategy HOUR24_OF_DAY_STRATEGY = new NumberStrategy(Calendar.HOUR_OF_DAY) { @Override int modify(final FastDateParser parser, final int iValue) { return iValue == 24 ? 0 : iValue; } }; private static final Strategy HOUR12_STRATEGY = new NumberStrategy(Calendar.HOUR) { @Override int modify(final FastDateParser parser, final int iValue) { return iValue == 12 ? 
0 : iValue; } }; private static final Strategy HOUR_STRATEGY = new NumberStrategy(Calendar.HOUR); private static final Strategy MINUTE_STRATEGY = new NumberStrategy(Calendar.MINUTE); private static final Strategy SECOND_STRATEGY = new NumberStrategy(Calendar.SECOND); private static final Strategy MILLISECOND_STRATEGY = new NumberStrategy(Calendar.MILLISECOND); }
src/main/java/org/apache/commons/lang3/time/FastDateParser.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3.time; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import java.text.DateFormatSymbols; import java.text.ParseException; import java.text.ParsePosition; import java.util.ArrayList; import java.util.Calendar; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.ListIterator; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.LocaleUtils; /** * <p>FastDateParser is a fast and thread-safe version of * {@link java.text.SimpleDateFormat}.</p> * * <p>To obtain a proxy to a FastDateParser, use {@link FastDateFormat#getInstance(String, TimeZone, Locale)} * or another variation of the factory methods of {@link FastDateFormat}.</p> * * <p>Since FastDateParser is thread safe, you can use a static member instance:</p> * <code> * private static final DateParser DATE_PARSER = FastDateFormat.getInstance("yyyy-MM-dd"); * 
</code> * * <p>This class can be used as a direct replacement for * {@code SimpleDateFormat} in most parsing situations. * This class is especially useful in multi-threaded server environments. * {@code SimpleDateFormat} is not thread-safe in any JDK version, * nor will it be as Sun has closed the * <a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4228335">bug</a>/RFE. * </p> * * <p>Only parsing is supported by this class, but all patterns are compatible with * SimpleDateFormat.</p> * * <p>The class operates in lenient mode, so for example a time of 90 minutes is treated as 1 hour 30 minutes.</p> * * <p>Timing tests indicate this class is as about as fast as SimpleDateFormat * in single thread applications and about 25% faster in multi-thread applications.</p> * * @since 3.2 * @see FastDatePrinter */ public class FastDateParser implements DateParser, Serializable { /** * Required for serialization support. * * @see java.io.Serializable */ private static final long serialVersionUID = 3L; static final Locale JAPANESE_IMPERIAL = new Locale("ja", "JP", "JP"); // defining fields private final String pattern; private final TimeZone timeZone; private final Locale locale; private final int century; private final int startYear; // derived fields private transient List<StrategyAndWidth> patterns; // comparator used to sort regex alternatives // alternatives should be ordered longer first, and shorter last. ('february' before 'feb') // all entries must be lowercase by locale. private static final Comparator<String> LONGER_FIRST_LOWERCASE = Comparator.reverseOrder(); /** * <p>Constructs a new FastDateParser.</p> * * Use {@link FastDateFormat#getInstance(String, TimeZone, Locale)} or another variation of the * factory methods of {@link FastDateFormat} to get a cached FastDateParser instance. 
* * @param pattern non-null {@link java.text.SimpleDateFormat} compatible * pattern * @param timeZone non-null time zone to use * @param locale non-null locale */ protected FastDateParser(final String pattern, final TimeZone timeZone, final Locale locale) { this(pattern, timeZone, locale, null); } /** * <p>Constructs a new FastDateParser.</p> * * @param pattern non-null {@link java.text.SimpleDateFormat} compatible * pattern * @param timeZone non-null time zone to use * @param locale non-null locale * @param centuryStart The start of the century for 2 digit year parsing * * @since 3.5 */ protected FastDateParser(final String pattern, final TimeZone timeZone, final Locale locale, final Date centuryStart) { this.pattern = pattern; this.timeZone = timeZone; this.locale = LocaleUtils.toLocale(locale); final Calendar definingCalendar = Calendar.getInstance(timeZone, this.locale); final int centuryStartYear; if (centuryStart != null) { definingCalendar.setTime(centuryStart); centuryStartYear = definingCalendar.get(Calendar.YEAR); } else if (this.locale.equals(JAPANESE_IMPERIAL)) { centuryStartYear = 0; } else { // from 80 years ago to 20 years from now definingCalendar.setTime(new Date()); centuryStartYear = definingCalendar.get(Calendar.YEAR) - 80; } century = centuryStartYear / 100 * 100; startYear = centuryStartYear - century; init(definingCalendar); } /** * Initializes derived fields from defining fields. 
* This is called from constructor and from readObject (de-serialization) * * @param definingCalendar the {@link java.util.Calendar} instance used to initialize this FastDateParser */ private void init(final Calendar definingCalendar) { patterns = new ArrayList<>(); final StrategyParser fm = new StrategyParser(definingCalendar); for (;;) { final StrategyAndWidth field = fm.getNextStrategy(); if (field == null) { break; } patterns.add(field); } } // helper classes to parse the format string //----------------------------------------------------------------------- /** * Holds strategy and field width */ private static class StrategyAndWidth { final Strategy strategy; final int width; StrategyAndWidth(final Strategy strategy, final int width) { this.strategy = strategy; this.width = width; } int getMaxWidth(final ListIterator<StrategyAndWidth> lt) { if (!strategy.isNumber() || !lt.hasNext()) { return 0; } final Strategy nextStrategy = lt.next().strategy; lt.previous(); return nextStrategy.isNumber() ? 
width : 0; } @Override public String toString() { return "StrategyAndWidth [strategy=" + strategy + ", width=" + width + "]"; } } /** * Parse format into Strategies */ private class StrategyParser { private final Calendar definingCalendar; private int currentIdx; StrategyParser(final Calendar definingCalendar) { this.definingCalendar = definingCalendar; } StrategyAndWidth getNextStrategy() { if (currentIdx >= pattern.length()) { return null; } final char c = pattern.charAt(currentIdx); if (isFormatLetter(c)) { return letterPattern(c); } return literal(); } private StrategyAndWidth letterPattern(final char c) { final int begin = currentIdx; while (++currentIdx < pattern.length()) { if (pattern.charAt(currentIdx) != c) { break; } } final int width = currentIdx - begin; return new StrategyAndWidth(getStrategy(c, width, definingCalendar), width); } private StrategyAndWidth literal() { boolean activeQuote = false; final StringBuilder sb = new StringBuilder(); while (currentIdx < pattern.length()) { final char c = pattern.charAt(currentIdx); if (!activeQuote && isFormatLetter(c)) { break; } else if (c == '\'' && (++currentIdx == pattern.length() || pattern.charAt(currentIdx) != '\'')) { activeQuote = !activeQuote; continue; } ++currentIdx; sb.append(c); } if (activeQuote) { throw new IllegalArgumentException("Unterminated quote"); } final String formatField = sb.toString(); return new StrategyAndWidth(new CopyQuotedStrategy(formatField), formatField.length()); } } private static boolean isFormatLetter(final char c) { return c >= 'A' && c <= 'Z' || c >= 'a' && c <= 'z'; } // Accessors //----------------------------------------------------------------------- /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#getPattern() */ @Override public String getPattern() { return pattern; } /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#getTimeZone() */ @Override public TimeZone getTimeZone() { return timeZone; } /* (non-Javadoc) * @see 
org.apache.commons.lang3.time.DateParser#getLocale() */ @Override public Locale getLocale() { return locale; } // Basics //----------------------------------------------------------------------- /** * <p>Compares another object for equality with this object.</p> * * @param obj the object to compare to * @return {@code true}if equal to this instance */ @Override public boolean equals(final Object obj) { if (!(obj instanceof FastDateParser)) { return false; } final FastDateParser other = (FastDateParser) obj; return pattern.equals(other.pattern) && timeZone.equals(other.timeZone) && locale.equals(other.locale); } /** * <p>Returns a hash code compatible with equals.</p> * * @return a hash code compatible with equals */ @Override public int hashCode() { return pattern.hashCode() + 13 * (timeZone.hashCode() + 13 * locale.hashCode()); } /** * <p>Gets a string version of this formatter.</p> * * @return a debugging string */ @Override public String toString() { return "FastDateParser[" + pattern + "," + locale + "," + timeZone.getID() + "]"; } // Serializing //----------------------------------------------------------------------- /** * Creates the object after serialization. This implementation reinitializes the * transient properties. * * @param in ObjectInputStream from which the object is being deserialized. * @throws IOException if there is an IO issue. * @throws ClassNotFoundException if a class cannot be found. 
*/ private void readObject(final ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); final Calendar definingCalendar = Calendar.getInstance(timeZone, locale); init(definingCalendar); } /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#parseObject(java.lang.String) */ @Override public Object parseObject(final String source) throws ParseException { return parse(source); } /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#parse(java.lang.String) */ @Override public Date parse(final String source) throws ParseException { final ParsePosition pp = new ParsePosition(0); final Date date = parse(source, pp); if (date == null) { // Add a note re supported date range if (locale.equals(JAPANESE_IMPERIAL)) { throw new ParseException("(The " + locale + " locale does not support dates before 1868 AD)\n" + "Unparseable date: \"" + source, pp.getErrorIndex()); } throw new ParseException("Unparseable date: " + source, pp.getErrorIndex()); } return date; } /* (non-Javadoc) * @see org.apache.commons.lang3.time.DateParser#parseObject(java.lang.String, java.text.ParsePosition) */ @Override public Object parseObject(final String source, final ParsePosition pos) { return parse(source, pos); } /** * This implementation updates the ParsePosition if the parse succeeds. * However, it sets the error index to the position before the failed field unlike * the method {@link java.text.SimpleDateFormat#parse(String, ParsePosition)} which sets * the error index to after the failed field. * <p> * To determine if the parse has succeeded, the caller must check if the current parse position * given by {@link ParsePosition#getIndex()} has been updated. If the input buffer has been fully * parsed, then the index will point to just after the end of the input buffer. 
* * @see org.apache.commons.lang3.time.DateParser#parse(java.lang.String, java.text.ParsePosition) */ @Override public Date parse(final String source, final ParsePosition pos) { // timing tests indicate getting new instance is 19% faster than cloning final Calendar cal = Calendar.getInstance(timeZone, locale); cal.clear(); return parse(source, pos, cal) ? cal.getTime() : null; } /** * Parses a formatted date string according to the format. Updates the Calendar with parsed fields. * Upon success, the ParsePosition index is updated to indicate how much of the source text was consumed. * Not all source text needs to be consumed. Upon parse failure, ParsePosition error index is updated to * the offset of the source text which does not match the supplied format. * * @param source The text to parse. * @param pos On input, the position in the source to start parsing, on output, updated position. * @param calendar The calendar into which to set parsed fields. * @return true, if source has been parsed (pos parsePosition is updated); otherwise false (and pos errorIndex is updated) * @throws IllegalArgumentException when Calendar has been set to be not lenient, and a parsed field is * out of range. 
*/ @Override public boolean parse(final String source, final ParsePosition pos, final Calendar calendar) { final ListIterator<StrategyAndWidth> lt = patterns.listIterator(); while (lt.hasNext()) { final StrategyAndWidth strategyAndWidth = lt.next(); final int maxWidth = strategyAndWidth.getMaxWidth(lt); if (!strategyAndWidth.strategy.parse(this, calendar, source, pos, maxWidth)) { return false; } } return true; } // Support for strategies //----------------------------------------------------------------------- private static StringBuilder simpleQuote(final StringBuilder sb, final String value) { for (int i = 0; i < value.length(); ++i) { final char c = value.charAt(i); switch (c) { case '\\': case '^': case '$': case '.': case '|': case '?': case '*': case '+': case '(': case ')': case '[': case '{': sb.append('\\'); default: sb.append(c); } } if (sb.charAt(sb.length() - 1) == '.') { // trailing '.' is optional sb.append('?'); } return sb; } /** * Gets the short and long values displayed for a field * @param cal The calendar to obtain the short and long values * @param locale The locale of display names * @param field The field of interest * @param regex The regular expression to build * @return The map of string display names to field values */ private static Map<String, Integer> appendDisplayNames(final Calendar cal, Locale locale, final int field, final StringBuilder regex) { final Map<String, Integer> values = new HashMap<>(); locale = LocaleUtils.toLocale(locale); final Map<String, Integer> displayNames = cal.getDisplayNames(field, Calendar.ALL_STYLES, locale); final TreeSet<String> sorted = new TreeSet<>(LONGER_FIRST_LOWERCASE); for (final Map.Entry<String, Integer> displayName : displayNames.entrySet()) { final String key = displayName.getKey().toLowerCase(locale); if (sorted.add(key)) { values.put(key, displayName.getValue()); } } for (final String symbol : sorted) { simpleQuote(regex, symbol).append('|'); } return values; } /** * Adjusts dates to be 
within appropriate century * @param twoDigitYear The year to adjust * @return A value between centuryStart(inclusive) to centuryStart+100(exclusive) */ private int adjustYear(final int twoDigitYear) { final int trial = century + twoDigitYear; return twoDigitYear >= startYear ? trial : trial + 100; } /** * A strategy to parse a single field from the parsing pattern */ private abstract static class Strategy { /** * Is this field a number? The default implementation returns false. * * @return true, if field is a number */ boolean isNumber() { return false; } abstract boolean parse(FastDateParser parser, Calendar calendar, String source, ParsePosition pos, int maxWidth); } /** * A strategy to parse a single field from the parsing pattern */ private abstract static class PatternStrategy extends Strategy { Pattern pattern; void createPattern(final StringBuilder regex) { createPattern(regex.toString()); } void createPattern(final String regex) { this.pattern = Pattern.compile(regex); } /** * Is this field a number? The default implementation returns false. * * @return true, if field is a number */ @Override boolean isNumber() { return false; } @Override boolean parse(final FastDateParser parser, final Calendar calendar, final String source, final ParsePosition pos, final int maxWidth) { final Matcher matcher = pattern.matcher(source.substring(pos.getIndex())); if (!matcher.lookingAt()) { pos.setErrorIndex(pos.getIndex()); return false; } pos.setIndex(pos.getIndex() + matcher.end(1)); setCalendar(parser, calendar, matcher.group(1)); return true; } abstract void setCalendar(FastDateParser parser, Calendar cal, String value); /** * Converts this instance to a handy debug string. 
* * @since 3.12.0 */ @Override public String toString() { return getClass().getSimpleName() + " [pattern=" + pattern + "]"; } } /** * Gets a Strategy given a field from a SimpleDateFormat pattern * @param f A sub-sequence of the SimpleDateFormat pattern * @param definingCalendar The calendar to obtain the short and long values * @return The Strategy that will handle parsing for the field */ private Strategy getStrategy(final char f, final int width, final Calendar definingCalendar) { switch (f) { default: throw new IllegalArgumentException("Format '" + f + "' not supported"); case 'D': return DAY_OF_YEAR_STRATEGY; case 'E': return getLocaleSpecificStrategy(Calendar.DAY_OF_WEEK, definingCalendar); case 'F': return DAY_OF_WEEK_IN_MONTH_STRATEGY; case 'G': return getLocaleSpecificStrategy(Calendar.ERA, definingCalendar); case 'H': // Hour in day (0-23) return HOUR_OF_DAY_STRATEGY; case 'K': // Hour in am/pm (0-11) return HOUR_STRATEGY; case 'M': return width >= 3 ? getLocaleSpecificStrategy(Calendar.MONTH, definingCalendar) : NUMBER_MONTH_STRATEGY; case 'S': return MILLISECOND_STRATEGY; case 'W': return WEEK_OF_MONTH_STRATEGY; case 'a': return getLocaleSpecificStrategy(Calendar.AM_PM, definingCalendar); case 'd': return DAY_OF_MONTH_STRATEGY; case 'h': // Hour in am/pm (1-12), i.e. midday/midnight is 12, not 0 return HOUR12_STRATEGY; case 'k': // Hour in day (1-24), i.e. midnight is 24, not 0 return HOUR24_OF_DAY_STRATEGY; case 'm': return MINUTE_STRATEGY; case 's': return SECOND_STRATEGY; case 'u': return DAY_OF_WEEK_STRATEGY; case 'w': return WEEK_OF_YEAR_STRATEGY; case 'y': case 'Y': return width > 2 ? 
LITERAL_YEAR_STRATEGY : ABBREVIATED_YEAR_STRATEGY; case 'X': return ISO8601TimeZoneStrategy.getStrategy(width); case 'Z': if (width == 2) { return ISO8601TimeZoneStrategy.ISO_8601_3_STRATEGY; } //$FALL-THROUGH$ case 'z': return getLocaleSpecificStrategy(Calendar.ZONE_OFFSET, definingCalendar); } } @SuppressWarnings("unchecked") // OK because we are creating an array with no entries private static final ConcurrentMap<Locale, Strategy>[] caches = new ConcurrentMap[Calendar.FIELD_COUNT]; /** * Gets a cache of Strategies for a particular field * @param field The Calendar field * @return a cache of Locale to Strategy */ private static ConcurrentMap<Locale, Strategy> getCache(final int field) { synchronized (caches) { if (caches[field] == null) { caches[field] = new ConcurrentHashMap<>(3); } return caches[field]; } } /** * Constructs a Strategy that parses a Text field * @param field The Calendar field * @param definingCalendar The calendar to obtain the short and long values * @return a TextStrategy for the field and Locale */ private Strategy getLocaleSpecificStrategy(final int field, final Calendar definingCalendar) { final ConcurrentMap<Locale, Strategy> cache = getCache(field); Strategy strategy = cache.get(locale); if (strategy == null) { strategy = field == Calendar.ZONE_OFFSET ? 
new TimeZoneStrategy(locale) : new CaseInsensitiveTextStrategy(field, definingCalendar, locale); final Strategy inCache = cache.putIfAbsent(locale, strategy); if (inCache != null) { return inCache; } } return strategy; } /** * A strategy that copies the static or quoted field in the parsing pattern */ private static class CopyQuotedStrategy extends Strategy { private final String formatField; /** * Constructs a Strategy that ensures the formatField has literal text * * @param formatField The literal text to match */ CopyQuotedStrategy(final String formatField) { this.formatField = formatField; } /** * {@inheritDoc} */ @Override boolean isNumber() { return false; } @Override boolean parse(final FastDateParser parser, final Calendar calendar, final String source, final ParsePosition pos, final int maxWidth) { for (int idx = 0; idx < formatField.length(); ++idx) { final int sIdx = idx + pos.getIndex(); if (sIdx == source.length()) { pos.setErrorIndex(sIdx); return false; } if (formatField.charAt(idx) != source.charAt(sIdx)) { pos.setErrorIndex(sIdx); return false; } } pos.setIndex(formatField.length() + pos.getIndex()); return true; } /** * Converts this instance to a handy debug string. 
* * @since 3.12.0 */ @Override public String toString() { return "CopyQuotedStrategy [formatField=" + formatField + "]"; } } /** * A strategy that handles a text field in the parsing pattern */ private static class CaseInsensitiveTextStrategy extends PatternStrategy { private final int field; final Locale locale; private final Map<String, Integer> lKeyValues; /** * Constructs a Strategy that parses a Text field * * @param field The Calendar field * @param definingCalendar The Calendar to use * @param locale The Locale to use */ CaseInsensitiveTextStrategy(final int field, final Calendar definingCalendar, final Locale locale) { this.field = field; this.locale = LocaleUtils.toLocale(locale); final StringBuilder regex = new StringBuilder(); regex.append("((?iu)"); lKeyValues = appendDisplayNames(definingCalendar, locale, field, regex); regex.setLength(regex.length() - 1); regex.append(")"); createPattern(regex); } /** * {@inheritDoc} */ @Override void setCalendar(final FastDateParser parser, final Calendar cal, final String value) { final String lowerCase = value.toLowerCase(locale); Integer iVal = lKeyValues.get(lowerCase); if (iVal == null) { // match missing the optional trailing period iVal = lKeyValues.get(lowerCase + '.'); } cal.set(field, iVal.intValue()); } /** * Converts this instance to a handy debug string. 
* * @since 3.12.0 */ @Override public String toString() { return "CaseInsensitiveTextStrategy [field=" + field + ", locale=" + locale + ", lKeyValues=" + lKeyValues + ", pattern=" + pattern + "]"; } } /** * A strategy that handles a number field in the parsing pattern */ private static class NumberStrategy extends Strategy { private final int field; /** * Constructs a Strategy that parses a Number field * * @param field The Calendar field */ NumberStrategy(final int field) { this.field = field; } /** * {@inheritDoc} */ @Override boolean isNumber() { return true; } @Override boolean parse(final FastDateParser parser, final Calendar calendar, final String source, final ParsePosition pos, final int maxWidth) { int idx = pos.getIndex(); int last = source.length(); if (maxWidth == 0) { // if no maxWidth, strip leading white space for (; idx < last; ++idx) { final char c = source.charAt(idx); if (!Character.isWhitespace(c)) { break; } } pos.setIndex(idx); } else { final int end = idx + maxWidth; if (last > end) { last = end; } } for (; idx < last; ++idx) { final char c = source.charAt(idx); if (!Character.isDigit(c)) { break; } } if (pos.getIndex() == idx) { pos.setErrorIndex(idx); return false; } final int value = Integer.parseInt(source.substring(pos.getIndex(), idx)); pos.setIndex(idx); calendar.set(field, modify(parser, value)); return true; } /** * Make any modifications to parsed integer * * @param parser The parser * @param iValue The parsed integer * @return The modified value */ int modify(final FastDateParser parser, final int iValue) { return iValue; } /** * Converts this instance to a handy debug string. * * @since 3.12.0 */ @Override public String toString() { return "NumberStrategy [field=" + field + "]"; } } private static final Strategy ABBREVIATED_YEAR_STRATEGY = new NumberStrategy(Calendar.YEAR) { /** * {@inheritDoc} */ @Override int modify(final FastDateParser parser, final int iValue) { return iValue < 100 ? 
parser.adjustYear(iValue) : iValue; } }; /** * A strategy that handles a time zone field in the parsing pattern */ static class TimeZoneStrategy extends PatternStrategy { private static final String RFC_822_TIME_ZONE = "[+-]\\d{4}"; private static final String GMT_OPTION = TimeZones.GMT_ID + "[+-]\\d{1,2}:\\d{2}"; private final Locale locale; private final Map<String, TzInfo> tzNames = new HashMap<>(); private static class TzInfo { final TimeZone zone; final int dstOffset; TzInfo(final TimeZone tz, final boolean useDst) { zone = tz; dstOffset = useDst ? tz.getDSTSavings() : 0; } } /** * Index of zone id */ private static final int ID = 0; /** * Constructs a Strategy that parses a TimeZone * * @param locale The Locale */ TimeZoneStrategy(final Locale locale) { this.locale = LocaleUtils.toLocale(locale); final StringBuilder sb = new StringBuilder(); sb.append("((?iu)" + RFC_822_TIME_ZONE + "|" + GMT_OPTION); final Set<String> sorted = new TreeSet<>(LONGER_FIRST_LOWERCASE); final String[][] zones = DateFormatSymbols.getInstance(locale).getZoneStrings(); for (final String[] zoneNames : zones) { // offset 0 is the time zone ID and is not localized final String tzId = zoneNames[ID]; if (tzId.equalsIgnoreCase(TimeZones.GMT_ID)) { continue; } final TimeZone tz = TimeZone.getTimeZone(tzId); // offset 1 is long standard name // offset 2 is short standard name final TzInfo standard = new TzInfo(tz, false); TzInfo tzInfo = standard; for (int i = 1; i < zoneNames.length; ++i) { switch (i) { case 3: // offset 3 is long daylight savings (or summertime) name // offset 4 is the short summertime name tzInfo = new TzInfo(tz, true); break; case 5: // offset 5 starts additional names, probably standard time tzInfo = standard; break; default: break; } if (zoneNames[i] != null) { final String key = zoneNames[i].toLowerCase(locale); // ignore the data associated with duplicates supplied in // the additional names if (sorted.add(key)) { tzNames.put(key, tzInfo); } } } } // order the regex 
alternatives with longer strings first, greedy // match will ensure longest string will be consumed for (final String zoneName : sorted) { simpleQuote(sb.append('|'), zoneName); } sb.append(")"); createPattern(sb); } /** * {@inheritDoc} */ @Override void setCalendar(final FastDateParser parser, final Calendar cal, final String timeZone) { final TimeZone tz = FastTimeZone.getGmtTimeZone(timeZone); if (tz != null) { cal.setTimeZone(tz); } else { final String lowerCase = timeZone.toLowerCase(locale); TzInfo tzInfo = tzNames.get(lowerCase); if (tzInfo == null) { // match missing the optional trailing period tzInfo = tzNames.get(lowerCase + '.'); } cal.set(Calendar.DST_OFFSET, tzInfo.dstOffset); cal.set(Calendar.ZONE_OFFSET, tzInfo.zone.getRawOffset()); } } /** * Converts this instance to a handy debug string. * * @since 3.12.0 */ @Override public String toString() { return "TimeZoneStrategy [locale=" + locale + ", tzNames=" + tzNames + ", pattern=" + pattern + "]"; } } private static class ISO8601TimeZoneStrategy extends PatternStrategy { // Z, +hh, -hh, +hhmm, -hhmm, +hh:mm or -hh:mm /** * Constructs a Strategy that parses a TimeZone * @param pattern The Pattern */ ISO8601TimeZoneStrategy(final String pattern) { createPattern(pattern); } /** * {@inheritDoc} */ @Override void setCalendar(final FastDateParser parser, final Calendar cal, final String value) { cal.setTimeZone(FastTimeZone.getGmtTimeZone(value)); } private static final Strategy ISO_8601_1_STRATEGY = new ISO8601TimeZoneStrategy("(Z|(?:[+-]\\d{2}))"); private static final Strategy ISO_8601_2_STRATEGY = new ISO8601TimeZoneStrategy("(Z|(?:[+-]\\d{2}\\d{2}))"); private static final Strategy ISO_8601_3_STRATEGY = new ISO8601TimeZoneStrategy("(Z|(?:[+-]\\d{2}(?::)\\d{2}))"); /** * Factory method for ISO8601TimeZoneStrategies. * * @param tokenLen a token indicating the length of the TimeZone String to be formatted. * @return a ISO8601TimeZoneStrategy that can format TimeZone String of length {@code tokenLen}. 
If no such * strategy exists, an IllegalArgumentException will be thrown. */ static Strategy getStrategy(final int tokenLen) { switch(tokenLen) { case 1: return ISO_8601_1_STRATEGY; case 2: return ISO_8601_2_STRATEGY; case 3: return ISO_8601_3_STRATEGY; default: throw new IllegalArgumentException("invalid number of X"); } } } private static final Strategy NUMBER_MONTH_STRATEGY = new NumberStrategy(Calendar.MONTH) { @Override int modify(final FastDateParser parser, final int iValue) { return iValue-1; } }; private static final Strategy LITERAL_YEAR_STRATEGY = new NumberStrategy(Calendar.YEAR); private static final Strategy WEEK_OF_YEAR_STRATEGY = new NumberStrategy(Calendar.WEEK_OF_YEAR); private static final Strategy WEEK_OF_MONTH_STRATEGY = new NumberStrategy(Calendar.WEEK_OF_MONTH); private static final Strategy DAY_OF_YEAR_STRATEGY = new NumberStrategy(Calendar.DAY_OF_YEAR); private static final Strategy DAY_OF_MONTH_STRATEGY = new NumberStrategy(Calendar.DAY_OF_MONTH); private static final Strategy DAY_OF_WEEK_STRATEGY = new NumberStrategy(Calendar.DAY_OF_WEEK) { @Override int modify(final FastDateParser parser, final int iValue) { return iValue == 7 ? Calendar.SUNDAY : iValue + 1; } }; private static final Strategy DAY_OF_WEEK_IN_MONTH_STRATEGY = new NumberStrategy(Calendar.DAY_OF_WEEK_IN_MONTH); private static final Strategy HOUR_OF_DAY_STRATEGY = new NumberStrategy(Calendar.HOUR_OF_DAY); private static final Strategy HOUR24_OF_DAY_STRATEGY = new NumberStrategy(Calendar.HOUR_OF_DAY) { @Override int modify(final FastDateParser parser, final int iValue) { return iValue == 24 ? 0 : iValue; } }; private static final Strategy HOUR12_STRATEGY = new NumberStrategy(Calendar.HOUR) { @Override int modify(final FastDateParser parser, final int iValue) { return iValue == 12 ? 
0 : iValue; } }; private static final Strategy HOUR_STRATEGY = new NumberStrategy(Calendar.HOUR); private static final Strategy MINUTE_STRATEGY = new NumberStrategy(Calendar.MINUTE); private static final Strategy SECOND_STRATEGY = new NumberStrategy(Calendar.SECOND); private static final Strategy MILLISECOND_STRATEGY = new NumberStrategy(Calendar.MILLISECOND); }
Better internal name.
src/main/java/org/apache/commons/lang3/time/FastDateParser.java
Better internal name.
<ide><path>rc/main/java/org/apache/commons/lang3/time/FastDateParser.java <ide> * {@inheritDoc} <ide> */ <ide> @Override <del> void setCalendar(final FastDateParser parser, final Calendar cal, final String value) { <add> void setCalendar(final FastDateParser parser, final Calendar calendar, final String value) { <ide> final String lowerCase = value.toLowerCase(locale); <ide> Integer iVal = lKeyValues.get(lowerCase); <ide> if (iVal == null) { <ide> // match missing the optional trailing period <ide> iVal = lKeyValues.get(lowerCase + '.'); <ide> } <del> cal.set(field, iVal.intValue()); <add> calendar.set(field, iVal.intValue()); <ide> } <ide> <ide> /**
JavaScript
apache-2.0
bb8ad5f9416ebec7df1db5d116e0b72940e6ff32
0
weilonge/unidisk,weilonge/unidisk,weilonge/unidisk
var pcs = require("../clouddrive/pcs"); var async = require('async'); var fs = require('fs'); var UD_BLOCK_SIZE = 1*1024*1024; var UD_QUEUE_SIZE = 3; var UD_CACHE_PATH = "/tmp/ud/cache"; var udManager = {}; udManager._writeCache = function (task, data, cb){ fs.writeFile(UD_CACHE_PATH + "/" + task.md5sum, data, function(err) { if(err) { console.log(err); } else { console.log("The file was saved!"); } cb(); }); } udManager._readCache = function (path, offset, size, requestList, cb){ var buffer = new Buffer(size), seek = 0, writeSize = 0, cursor_moved = 0; for(var i in requestList ){ var task = requestList[i]; if( this.FileDataCache[task.md5sum] && this.FileDataCache[task.md5sum].status === "DONE" ){ seek = ( offset + cursor_moved ) % UD_BLOCK_SIZE; writeSize = UD_BLOCK_SIZE - seek; if( (writeSize + cursor_moved ) > size ){ writeSize = size - cursor_moved; } var fd = fs.openSync(UD_CACHE_PATH + "/" + task.md5sum, "rs"); fs.readSync(fd, buffer, cursor_moved, writeSize, seek); fs.closeSync(fd); cursor_moved += writeSize ; } else { console.error("======= Critical Error ======="); console.error(path); console.error(offset); console.error(size); console.error(requestList); console.error(this.FileDataCache); throw Error("data is not finished."); } } cb(buffer); } udManager.init = function(){ this.FileMetaCache = {}; this.FileListCache = {}; this.FileDataCache = {}; this.FileDownloadQueue = async.queue(function (task, callback) { console.log(' [B] ' + task.path + "|" + task.offset + '| downloading...'); task.status = "DOWNLOADING"; udManager.FileDataCache[task.md5sum] = task; udManager.downloadFileInRange(task.path, task.offset, task.size, function(error, response){ console.log(task.path + "|" + task.offset + '| done!! ' + response.data.length); // Write the buffer to the cache file. 
udManager._writeCache(task, response.data, function(){ udManager.FileDataCache[task.md5sum].status = "DONE"; callback(); }); }); }, UD_QUEUE_SIZE); } udManager.showStat = function (cb) { pcs.quota(cb); } udManager.getFileMeta = function (path, cb) { if( this.FileMetaCache.hasOwnProperty(path) ){ cb(null, { data : this.FileMetaCache[path] }); }else{ var retry = function () { pcs.getFileMeta(path, function(error, response){ if(error){ console.log("" + new Date () + "| " + error); retry(); }else{ udManager.FileMetaCache[path] = response.data; cb(error, response); } }); } retry(); } } udManager.getFileList = function (path, cb) { if( this.FileListCache.hasOwnProperty(path) ){ cb(null, { data : this.FileListCache[path] }); }else{ pcs.getFileList(path, function(error, response){ udManager.FileListCache[path] = response.data; cb(error, response); }); } } udManager._genmd5sum = function (task){ var crypto = require('crypto'); var name = task.path + "" + task.offset + ""; var hash = crypto.createHash('md5').update(name).digest('hex'); return hash; } udManager._generateRequestList = function(fileMeta, offset, size, fileSize){ const endPos = offset + size; var requestList = []; var alignedOffset = Math.floor( offset / UD_BLOCK_SIZE) * UD_BLOCK_SIZE; for(; alignedOffset < endPos && alignedOffset < fileSize; alignedOffset += UD_BLOCK_SIZE ){ var task = { path: fileMeta.path, totalSize: fileMeta.size, mtime: fileMeta.mtime, status: "INIT", priority: "HIGH", md5sum: "", offset: alignedOffset, size: ((alignedOffset + UD_BLOCK_SIZE) > fileSize ? (fileSize - alignedOffset) : UD_BLOCK_SIZE ) }; var taskMd5sum = this._genmd5sum(task); task.md5sum = taskMd5sum; requestList.push(task); } return requestList; } udManager._isAllRequestDone = function (downloadRequest){ var done = true; for(var req in downloadRequest){ var taskMd5sum = downloadRequest[req].md5sum; if(this.FileDataCache[taskMd5sum] && this.FileDataCache[taskMd5sum].status === "DONE" ){ // do nothing. 
}else{ done = false; break; } } return done; } udManager._requestPushAndDownload = function (path, downloadRequest, cb){ async.each(downloadRequest, function(task, callback){ var taskMd5sum = task.md5sum; if(udManager.FileDataCache[taskMd5sum]){ console.log(' [C1] ' + udManager.FileDataCache[taskMd5sum].path + " is in cache: " + udManager.FileDataCache[taskMd5sum].status); callback(); }else{ noNewTask = false; udManager.FileDownloadQueue.push(task, function (err){ console.log(' [C2] ' + 'pushed task is done.'); callback(); }); } }, function(err){ // Verify the download request is all finished or not. if( udManager._isAllRequestDone(downloadRequest) ){ console.log(' [D] ' + 'All requests are done.'); cb(); } }); } udManager.downloadFileInRangeByCache = function(path, offset, size, cb) { console.log('{{'); console.log(' [A] ' + path + ' ' + offset + ' ' + size); udManager.getFileMeta(path, function(error, response){ const totalSize = response.data.list[0].size; // 1. Split the download request. var requestList = udManager._generateRequestList(response.data.list[0], parseInt(offset), parseInt(size), totalSize); // 2. Push downloading request. udManager._requestPushAndDownload(path, requestList, function(){ // 3. All requests are done. Aggregate all data. // Read the request data from files. 
udManager._readCache(path, offset, size, requestList, function(data){ console.log(' [E] data is prepared.'); console.log('}}'); cb(null, { data: data }); }); }); }); } udManager.downloadFileInRange = function(path, offset, size, cb) { var retry = function () { pcs.getFileDownload(path, offset, size, function(error, response){ if(error){ console.log('[ERROR] retry, error happened: ' + error); retry(); }if( !response || !response.data instanceof Buffer ){ console.log('[ERROR] retry, error response: ' + response); retry(); }if( size != response.data.length ){ console.log('[ERROR] retry, size error: ' + size + " " + response.data.length ); retry(); }else{ cb(error, response); } }); } retry(); } udManager.downloadFileInMultiRange = function(path, list, cb) { var listArray = null; if( typeof list === "string" ){ try { listArray = JSON.parse(list).list; } catch (e) { cb("Incorrect download list.", null); return ; } }else{ listArray = list.list; } async.each(listArray, function(item, callback){ udManager.downloadFileInRange(path, item.offset, item.size, function(error, response){ console.log(response.data); callback(); }); }, function(){ cb(null, { data: "OK!" }); }); } module.exports = udManager;
helper/udManager.js
var pcs = require("../clouddrive/pcs"); var async = require('async'); var fs = require('fs'); var UD_BLOCK_SIZE = 1*1024*1024; var UD_QUEUE_SIZE = 3; var UD_CACHE_PATH = "/tmp/ud/cache"; var udManager = {}; udManager._writeCache = function (task, data, cb){ fs.writeFile(UD_CACHE_PATH + "/" + task.md5sum, data, function(err) { if(err) { console.log(err); } else { console.log("The file was saved!"); } cb(); }); } udManager._readCache = function (path, offset, size, requestList, cb){ var buffer = new Buffer(size), seek = 0, writeSize = 0, cursor_moved = 0; for(var i in requestList ){ var task = requestList[i]; if( this.FileDataCache[task.md5sum] && this.FileDataCache[task.md5sum].status === "DONE" ){ seek = ( offset + cursor_moved ) % UD_BLOCK_SIZE; writeSize = UD_BLOCK_SIZE - seek; if( (writeSize + cursor_moved ) > size ){ writeSize = size - cursor_moved; } var fd = fs.openSync(UD_CACHE_PATH + "/" + task.md5sum, "rs"); fs.readSync(fd, buffer, cursor_moved, writeSize, seek); fs.closeSync(fd); cursor_moved += writeSize ; } else { console.error("======= Critical Error ======="); console.error(path); console.error(offset); console.error(size); console.error(requestList); console.error(this.FileDataCache); throw Error("data is not finished."); } } cb(buffer); } udManager.init = function(){ this.FileMetaCache = {}; this.FileListCache = {}; this.FileDataCache = {}; this.FileDownloadQueue = async.queue(function (task, callback) { console.log(task.path + "|" + task.offset + '| downloading...'); task.status = "DOWNLOADING"; udManager.FileDataCache[task.md5sum] = task; udManager.downloadFileInRange(task.path, task.offset, task.size, function(error, response){ console.log(task.path + "|" + task.offset + '| done!! ' + response.data.length); // Write the buffer to the cache file. 
udManager._writeCache(task, response.data, function(){ udManager.FileDataCache[task.md5sum].status = "DONE"; callback(); }); }); }, UD_QUEUE_SIZE); } udManager.showStat = function (cb) { pcs.quota(cb); } udManager.getFileMeta = function (path, cb) { if( this.FileMetaCache.hasOwnProperty(path) ){ cb(null, { data : this.FileMetaCache[path] }); }else{ var retry = function () { pcs.getFileMeta(path, function(error, response){ if(error){ console.log("" + new Date () + "| " + error); retry(); }else{ udManager.FileMetaCache[path] = response.data; cb(error, response); } }); } retry(); } } udManager.getFileList = function (path, cb) { if( this.FileListCache.hasOwnProperty(path) ){ cb(null, { data : this.FileListCache[path] }); }else{ pcs.getFileList(path, function(error, response){ udManager.FileListCache[path] = response.data; cb(error, response); }); } } udManager._genmd5sum = function (task){ var crypto = require('crypto'); var name = task.path + "" + task.offset + ""; var hash = crypto.createHash('md5').update(name).digest('hex'); return hash; } udManager._generateRequestList = function(fileMeta, offset, size, fileSize){ const endPos = offset + size; var requestList = []; var alignedOffset = Math.floor( offset / UD_BLOCK_SIZE) * UD_BLOCK_SIZE; for(; alignedOffset < endPos && alignedOffset < fileSize; alignedOffset += UD_BLOCK_SIZE ){ var task = { path: fileMeta.path, totalSize: fileMeta.size, mtime: fileMeta.mtime, status: "INIT", priority: "HIGH", md5sum: "", offset: alignedOffset, size: ((alignedOffset + UD_BLOCK_SIZE) > fileSize ? (fileSize - alignedOffset) : UD_BLOCK_SIZE ) }; var taskMd5sum = this._genmd5sum(task); task.md5sum = taskMd5sum; requestList.push(task); } return requestList; } udManager._isAllRequestDone = function (downloadRequest){ var done = true; for(var req in downloadRequest){ var taskMd5sum = downloadRequest[req].md5sum; if(this.FileDataCache[taskMd5sum] && this.FileDataCache[taskMd5sum].status === "DONE" ){ // do nothing. 
}else{ done = false; break; } } console.log("======= Test request done ======="); console.log(this.FileDataCache); console.log(done); console.log(downloadRequest); return done; } udManager._requestPushAndDownload = function (path, downloadRequest, cb){ async.each(downloadRequest, function(task, callback){ var taskMd5sum = task.md5sum; if(udManager.FileDataCache[taskMd5sum]){ console.log('##[B.1] ' + udManager.FileDataCache[taskMd5sum].path + " is in cache: " + udManager.FileDataCache[taskMd5sum].status); callback(); }else{ noNewTask = false; udManager.FileDownloadQueue.push(task, function (err){ console.log('##[B.2] ' + 'pushed task is done.'); callback(); }); } }, function(err){ // Verify the download request is all finished or not. if( udManager._isAllRequestDone(downloadRequest) ){ console.log('##[C] ' + 'All requests are done.'); cb(); } }); } udManager.downloadFileInRangeByCache = function(path, offset, size, cb) { console.log('##### ' + path + ' ' + offset + ' ' + size); udManager.getFileMeta(path, function(error, response){ const totalSize = response.data.list[0].size; // 1. Split the download request. var requestList = udManager._generateRequestList(response.data.list[0], parseInt(offset), parseInt(size), totalSize); // 2. Push downloading request. udManager._requestPushAndDownload(path, requestList, function(){ // 3. All requests are done. Aggregate all data. console.log('yeah!!! all requests are done.'); // Read the request data from files. 
udManager._readCache(path, offset, size, requestList, function(data){ console.log('data is prepared.'); cb(null, { data: data }); }); }); }); } udManager.downloadFileInRange = function(path, offset, size, cb) { var retry = function () { pcs.getFileDownload(path, offset, size, function(error, response){ console.log('B: ' + error); if(error){ console.log('[ERROR] retry, error happened: ' + error); retry(); }if( !response || !response.data instanceof Buffer ){ console.log('[ERROR] retry, error response: ' + response); retry(); }if( size != response.data.length ){ console.log('[ERROR] retry, size error: ' + size + " " + response.data.length ); retry(); }else{ cb(error, response); } }); } retry(); } udManager.downloadFileInMultiRange = function(path, list, cb) { var listArray = null; if( typeof list === "string" ){ try { listArray = JSON.parse(list).list; } catch (e) { cb("Incorrect download list.", null); return ; } }else{ listArray = list.list; } async.each(listArray, function(item, callback){ udManager.downloadFileInRange(path, item.offset, item.size, function(error, response){ console.log(response.data); callback(); }); }, function(){ cb(null, { data: "OK!" }); }); } module.exports = udManager;
Improve the log format.
helper/udManager.js
Improve the log format.
<ide><path>elper/udManager.js <ide> this.FileListCache = {}; <ide> this.FileDataCache = {}; <ide> this.FileDownloadQueue = async.queue(function (task, callback) { <del> console.log(task.path + "|" + task.offset + '| downloading...'); <add> console.log(' [B] ' + task.path + "|" + task.offset + '| downloading...'); <ide> task.status = "DOWNLOADING"; <ide> udManager.FileDataCache[task.md5sum] = task; <ide> udManager.downloadFileInRange(task.path, task.offset, task.size, function(error, response){ <ide> break; <ide> } <ide> } <del> console.log("======= Test request done ======="); <del> console.log(this.FileDataCache); <del> console.log(done); <del> console.log(downloadRequest); <ide> return done; <ide> } <ide> <ide> var taskMd5sum = task.md5sum; <ide> <ide> if(udManager.FileDataCache[taskMd5sum]){ <del> console.log('##[B.1] ' + udManager.FileDataCache[taskMd5sum].path + " is in cache: " + udManager.FileDataCache[taskMd5sum].status); <add> console.log(' [C1] ' + udManager.FileDataCache[taskMd5sum].path + " is in cache: " + udManager.FileDataCache[taskMd5sum].status); <ide> callback(); <ide> }else{ <ide> noNewTask = false; <ide> udManager.FileDownloadQueue.push(task, function (err){ <del> console.log('##[B.2] ' + 'pushed task is done.'); <add> console.log(' [C2] ' + 'pushed task is done.'); <ide> callback(); <ide> }); <ide> } <ide> }, function(err){ <ide> // Verify the download request is all finished or not. <ide> if( udManager._isAllRequestDone(downloadRequest) ){ <del> console.log('##[C] ' + 'All requests are done.'); <add> console.log(' [D] ' + 'All requests are done.'); <ide> cb(); <ide> } <ide> }); <ide> } <ide> <ide> udManager.downloadFileInRangeByCache = function(path, offset, size, cb) { <del> console.log('##### ' + path + ' ' + offset + ' ' + size); <add> console.log('{{'); <add> console.log(' [A] ' + path + ' ' + offset + ' ' + size); <ide> udManager.getFileMeta(path, function(error, response){ <ide> const totalSize = response.data.list[0].size; <ide> // 1. 
Split the download request. <ide> // 2. Push downloading request. <ide> udManager._requestPushAndDownload(path, requestList, function(){ <ide> // 3. All requests are done. Aggregate all data. <del> console.log('yeah!!! all requests are done.'); <ide> // Read the request data from files. <ide> udManager._readCache(path, offset, size, requestList, function(data){ <del> console.log('data is prepared.'); <add> console.log(' [E] data is prepared.'); <add> console.log('}}'); <ide> cb(null, { <ide> data: data <ide> }); <ide> udManager.downloadFileInRange = function(path, offset, size, cb) { <ide> var retry = function () { <ide> pcs.getFileDownload(path, offset, size, function(error, response){ <del> console.log('B: ' + error); <ide> if(error){ <ide> console.log('[ERROR] retry, error happened: ' + error); <ide> retry();
Java
apache-2.0
51c50d8546adef7132728f65e785dff4f0d4e3bf
0
clebertsuconic/activemq-artemis,clebertsuconic/activemq-artemis,tabish121/activemq-artemis,tabish121/activemq-artemis,apache/activemq-artemis,tabish121/activemq-artemis,apache/activemq-artemis,tabish121/activemq-artemis,clebertsuconic/activemq-artemis,apache/activemq-artemis,clebertsuconic/activemq-artemis,apache/activemq-artemis
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.management.impl; import javax.management.MBeanAttributeInfo; import javax.management.MBeanOperationInfo; import javax.management.openmbean.CompositeData; import javax.transaction.xa.Xid; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.JsonUtil; import org.apache.activemq.artemis.api.core.Message; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.management.QueueControl; import org.apache.activemq.artemis.api.core.management.ResourceNames; import org.apache.activemq.artemis.core.filter.Filter; import org.apache.activemq.artemis.core.filter.impl.FilterImpl; import org.apache.activemq.artemis.core.messagecounter.MessageCounter; import org.apache.activemq.artemis.core.messagecounter.impl.MessageCounterHelper; import org.apache.activemq.artemis.core.persistence.StorageManager; import org.apache.activemq.artemis.core.postoffice.Binding; import 
org.apache.activemq.artemis.core.security.SecurityStore; import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.server.Consumer; import org.apache.activemq.artemis.core.server.MessageReference; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.ServerConsumer; import org.apache.activemq.artemis.core.server.impl.RefsOperation; import org.apache.activemq.artemis.core.settings.HierarchicalRepository; import org.apache.activemq.artemis.core.settings.impl.AddressSettings; import org.apache.activemq.artemis.core.transaction.ResourceManager; import org.apache.activemq.artemis.core.transaction.Transaction; import org.apache.activemq.artemis.core.transaction.TransactionOperation; import org.apache.activemq.artemis.json.JsonArray; import org.apache.activemq.artemis.json.JsonArrayBuilder; import org.apache.activemq.artemis.json.JsonObjectBuilder; import org.apache.activemq.artemis.logs.AuditLogger; import org.apache.activemq.artemis.selector.filter.Filterable; import org.apache.activemq.artemis.utils.JsonLoader; import org.apache.activemq.artemis.utils.collections.LinkedListIterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.invoke.MethodHandles; public class QueueControlImpl extends AbstractControl implements QueueControl { private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public static final int FLUSH_LIMIT = 500; private final Queue queue; private final String address; private final ActiveMQServer server; private final StorageManager storageManager; private final SecurityStore securityStore; private final HierarchicalRepository<AddressSettings> addressSettingsRepository; private MessageCounter counter; private static String toJSON(final Map<String, Object>[] messages) { JsonArray array = toJSONMsgArray(messages); return 
array.toString(); } private static JsonArray toJSONMsgArray(final Map<String, Object>[] messages) { JsonArrayBuilder array = JsonLoader.createArrayBuilder(); for (Map<String, Object> message : messages) { array.add(JsonUtil.toJsonObject(message)); } return array.build(); } private static String toJSON(final Map<String, Map<String, Object>[]> messages) { JsonArrayBuilder arrayReturn = JsonLoader.createArrayBuilder(); for (Map.Entry<String, Map<String, Object>[]> entry : messages.entrySet()) { JsonObjectBuilder objectItem = JsonLoader.createObjectBuilder(); objectItem.add("consumerName", entry.getKey()); objectItem.add("elements", toJSONMsgArray(entry.getValue())); arrayReturn.add(objectItem); } return arrayReturn.build().toString(); } public QueueControlImpl(final Queue queue, final String address, final ActiveMQServer server, final StorageManager storageManager, final SecurityStore securityStore, final HierarchicalRepository<AddressSettings> addressSettingsRepository) throws Exception { super(QueueControl.class, storageManager); this.queue = queue; this.address = address; this.server = server; this.storageManager = storageManager; this.securityStore = securityStore; this.addressSettingsRepository = addressSettingsRepository; } public void setMessageCounter(final MessageCounter counter) { this.counter = counter; } // QueueControlMBean implementation ------------------------------ @Override public String getName() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getName(queue); } clearIO(); try { return queue.getName().toString(); } finally { blockOnIO(); } } @Override public String getAddress() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getAddress(queue); } checkStarted(); return address; } @Override public String getFilter() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFilter(queue); } checkStarted(); clearIO(); try { Filter filter = queue.getFilter(); return filter != null ? 
filter.getFilterString().toString() : null; } finally { blockOnIO(); } } @Override public boolean isDurable() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isDurable(queue); } checkStarted(); clearIO(); try { return queue.isDurable(); } finally { blockOnIO(); } } @Override public String getUser() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getUser(queue); } checkStarted(); clearIO(); try { SimpleString user = queue.getUser(); return user == null ? null : user.toString(); } finally { blockOnIO(); } } @Override public String getRoutingType() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getRoutingType(queue); } checkStarted(); clearIO(); try { return queue.getRoutingType().toString(); } finally { blockOnIO(); } } @Override public boolean isTemporary() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isTemporary(queue); } checkStarted(); clearIO(); try { return queue.isTemporary(); } finally { blockOnIO(); } } @Override public boolean isRetroactiveResource() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isRetroactiveResource(queue); } checkStarted(); clearIO(); try { return ResourceNames.isRetroactiveResource(server.getInternalNamingPrefix(), queue.getName()); } finally { blockOnIO(); } } @Override public long getMessageCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessageCount(queue); } checkStarted(); clearIO(); try { return queue.getMessageCount(); } finally { blockOnIO(); } } @Override public long getPersistentSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getPersistentSize(queue); } checkStarted(); clearIO(); try { return queue.getPersistentSize(); } finally { blockOnIO(); } } @Override public long getDurableMessageCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableMessageCount(queue); } checkStarted(); clearIO(); try { return queue.getDurableMessageCount(); } finally { blockOnIO(); } } @Override public long getDurablePersistentSize() { if 
(AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurablePersistSize(queue); } checkStarted(); clearIO(); try { return queue.getDurablePersistentSize(); } finally { blockOnIO(); } } @Override public int getConsumerCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getConsumerCount(queue); } checkStarted(); clearIO(); try { return queue.getConsumerCount(); } finally { blockOnIO(); } } @Override public int getDeliveringCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDeliveringCount(queue); } checkStarted(); clearIO(); try { return queue.getDeliveringCount(); } finally { blockOnIO(); } } @Override public long getDeliveringSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDeliveringSize(queue); } checkStarted(); clearIO(); try { return queue.getDeliveringSize(); } finally { blockOnIO(); } } @Override public int getDurableDeliveringCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableDeliveringCount(queue); } checkStarted(); clearIO(); try { return queue.getDurableDeliveringCount(); } finally { blockOnIO(); } } @Override public long getDurableDeliveringSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableDeliveringSize(queue); } checkStarted(); clearIO(); try { return queue.getDurableDeliveringSize(); } finally { blockOnIO(); } } @Override public long getMessagesAdded() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesAdded(queue); } checkStarted(); clearIO(); try { return queue.getMessagesAdded(); } finally { blockOnIO(); } } @Override public long getMessagesAcknowledged() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesAcknowledged(queue); } checkStarted(); clearIO(); try { return queue.getMessagesAcknowledged(); } finally { blockOnIO(); } } @Override public long getAcknowledgeAttempts() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesAcknowledged(queue); } checkStarted(); clearIO(); try { return 
queue.getAcknowledgeAttempts(); } finally { blockOnIO(); } } @Override public long getMessagesExpired() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesExpired(queue); } checkStarted(); clearIO(); try { return queue.getMessagesExpired(); } finally { blockOnIO(); } } @Override public long getMessagesKilled() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesKilled(queue); } checkStarted(); clearIO(); try { return queue.getMessagesKilled(); } finally { blockOnIO(); } } @Override public long getID() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getID(queue); } checkStarted(); clearIO(); try { return queue.getID(); } finally { blockOnIO(); } } @Override public long getScheduledCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getScheduledCount(queue); } checkStarted(); clearIO(); try { return queue.getScheduledCount(); } finally { blockOnIO(); } } @Override public long getScheduledSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getScheduledSize(queue); } checkStarted(); clearIO(); try { return queue.getScheduledSize(); } finally { blockOnIO(); } } @Override public long getDurableScheduledCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableScheduledCount(queue); } checkStarted(); clearIO(); try { return queue.getDurableScheduledCount(); } finally { blockOnIO(); } } @Override public long getDurableScheduledSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableScheduledSize(queue); } checkStarted(); clearIO(); try { return queue.getDurableScheduledSize(); } finally { blockOnIO(); } } @Override public String getDeadLetterAddress() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDeadLetterAddress(queue); } checkStarted(); clearIO(); try { return queue.getDeadLetterAddress() == null ? 
null : queue.getDeadLetterAddress().toString(); } finally { blockOnIO(); } } @Override public String getExpiryAddress() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getExpiryAddress(queue); } checkStarted(); clearIO(); try { return queue.getExpiryAddress() == null ? null : queue.getExpiryAddress().toString(); } finally { blockOnIO(); } } @Override public int getMaxConsumers() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMaxConsumers(queue); } checkStarted(); clearIO(); try { return queue.getMaxConsumers(); } finally { blockOnIO(); } } @Override public boolean isPurgeOnNoConsumers() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isPurgeOnNoConsumers(queue); } checkStarted(); clearIO(); try { return queue.isPurgeOnNoConsumers(); } finally { blockOnIO(); } } @Override public void disable() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.disable(queue); } checkStarted(); clearIO(); try { server.getPostOffice().updateQueue(queue.getQueueConfiguration().setEnabled(false)); } finally { blockOnIO(); } } @Override public void enable() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.enable(queue); } checkStarted(); clearIO(); try { server.getPostOffice().updateQueue(queue.getQueueConfiguration().setEnabled(true)); } finally { blockOnIO(); } } @Override public boolean isEnabled() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isEnabled(queue); } checkStarted(); clearIO(); try { return queue.isEnabled(); } finally { blockOnIO(); } } @Override public boolean isConfigurationManaged() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isConfigurationManaged(queue); } checkStarted(); clearIO(); try { return queue.isConfigurationManaged(); } finally { blockOnIO(); } } @Override public boolean isExclusive() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isExclusive(queue); } checkStarted(); clearIO(); try { return queue.isExclusive(); } finally { blockOnIO(); } } @Override 
public boolean isLastValue() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isLastValue(queue); } checkStarted(); clearIO(); try { return queue.isLastValue(); } finally { blockOnIO(); } } @Override public String getLastValueKey() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.lastValueKey(queue); } checkStarted(); clearIO(); try { if (queue.getLastValueKey() != null) { return queue.getLastValueKey().toString(); } else { return null; } } finally { blockOnIO(); } } @Override public int getConsumersBeforeDispatch() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.consumersBeforeDispatch(queue); } checkStarted(); clearIO(); try { return queue.getConsumersBeforeDispatch(); } finally { blockOnIO(); } } @Override public long getDelayBeforeDispatch() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.delayBeforeDispatch(queue); } checkStarted(); clearIO(); try { return queue.getDelayBeforeDispatch(); } finally { blockOnIO(); } } @Override public Map<String, Object>[] listScheduledMessages() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listScheduledMessages(queue); } checkStarted(); clearIO(); try { List<MessageReference> refs = queue.getScheduledMessages(); return convertMessagesToMaps(refs); } finally { blockOnIO(); } } @Override public String listScheduledMessagesAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listScheduledMessagesAsJSON(queue); } checkStarted(); clearIO(); try { return QueueControlImpl.toJSON(listScheduledMessages()); } finally { blockOnIO(); } } /** * @param refs * @return */ private Map<String, Object>[] convertMessagesToMaps(List<MessageReference> refs) throws ActiveMQException { final int attributeSizeLimit = addressSettingsRepository.getMatch(address).getManagementMessageAttributeSizeLimit(); Map<String, Object>[] messages = new Map[refs.size()]; int i = 0; for (MessageReference ref : refs) { Message message = ref.getMessage(); messages[i++] = 
message.toMap(attributeSizeLimit); } return messages; } @Override public Map<String, Map<String, Object>[]> listDeliveringMessages() throws ActiveMQException { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listDeliveringMessages(queue); } checkStarted(); clearIO(); try { Map<String, List<MessageReference>> msgs = queue.getDeliveringMessages(); Map<String, Map<String, Object>[]> msgRet = new HashMap<>(); for (Map.Entry<String, List<MessageReference>> entry : msgs.entrySet()) { msgRet.put(entry.getKey(), convertMessagesToMaps(entry.getValue())); } return msgRet; } finally { blockOnIO(); } } @Override public String listDeliveringMessagesAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listDeliveringMessagesAsJSON(queue); } checkStarted(); clearIO(); try { return QueueControlImpl.toJSON(listDeliveringMessages()); } finally { blockOnIO(); } } @Override public Map<String, Object>[] listMessages(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessages(queue, filterStr); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); List<Map<String, Object>> messages = new ArrayList<>(); queue.flushExecutor(); final AddressSettings addressSettings = addressSettingsRepository.getMatch(address); final int attributeSizeLimit = addressSettings.getManagementMessageAttributeSizeLimit(); final int limit = addressSettings.getManagementBrowsePageSize(); int count = 0; try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) { try { while (iterator.hasNext() && count++ < limit) { MessageReference ref = iterator.next(); if (filter == null || filter.match(ref.getMessage())) { Message message = ref.getMessage(); messages.add(message.toMap(attributeSizeLimit)); } } } catch (NoSuchElementException ignored) { // this could happen through paging browsing } return messages.toArray(new Map[messages.size()]); } } catch (ActiveMQException e) { throw new 
IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public String listMessagesAsJSON(final String filter) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessagesAsJSON(queue); } checkStarted(); clearIO(); try { return QueueControlImpl.toJSON(listMessages(filter)); } finally { blockOnIO(); } } /** * this method returns a Map representing the first message. * or null if there's no first message. * @return * @throws Exception */ protected Map<String, Object> getFirstMessage() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFirstMessage(queue); } checkStarted(); clearIO(); try { final int attributeSizeLimit = addressSettingsRepository.getMatch(address).getManagementMessageAttributeSizeLimit(); MessageReference firstMessage = queue.peekFirstMessage(); if (firstMessage != null) { return firstMessage.getMessage().toMap(attributeSizeLimit); } else { return null; } } finally { blockOnIO(); } } @Override public String getFirstMessageAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFirstMessageAsJSON(queue); } Map<String, Object> message = getFirstMessage(); // I"m returning a new Map[1] in case of no first message, because older versions used to return that when null // and I'm playing safe with the compatibility here. return toJSON(message == null ? 
new Map[1] : new Map[]{message}); } @Override public Long getFirstMessageTimestamp() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFirstMessageTimestamp(queue); } Map<String, Object> message = getFirstMessage(); if (message == null) { return null; } else { if (!message.containsKey("timestamp")) { return null; } else { return (Long) message.get("timestamp"); } } } @Override public Long getFirstMessageAge() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFirstMessageAge(queue); } Long firstMessageTimestamp = getFirstMessageTimestamp(); if (firstMessageTimestamp == null) { return null; } long now = new Date().getTime(); return now - firstMessageTimestamp.longValue(); } @Override public long countMessages() throws Exception { return countMessages(null); } @Override public long countMessages(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.countMessages(queue, filterStr); } Long value = internalCountMessages(filterStr, null).get(null); return value == null ? 
0 : value; } @Override public String countMessages(final String filterStr, final String groupByProperty) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.countMessages(queue, filterStr, groupByProperty); } return JsonUtil.toJsonObject(internalCountMessages(filterStr, groupByProperty)).toString(); } private Map<String, Long> internalCountMessages(final String filterStr, final String groupByPropertyStr) throws Exception { checkStarted(); clearIO(); Map<String, Long> result = new HashMap<>(); try { Filter filter = FilterImpl.createFilter(filterStr); SimpleString groupByProperty = SimpleString.toSimpleString(groupByPropertyStr); if (filter == null && groupByProperty == null) { result.put(null, getMessageCount()); } else { final int limit = addressSettingsRepository.getMatch(address).getManagementBrowsePageSize(); int count = 0; try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) { try { while (iterator.hasNext() && count++ < limit) { Message message = iterator.next().getMessage(); internalComputeMessage(result, filter, groupByProperty, message); } } catch (NoSuchElementException ignored) { // this could happen through paging browsing } } } return result; } finally { blockOnIO(); } } @Override public long countDeliveringMessages(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.countDeliveringMessages(queue, filterStr); } Long value = internalCountDeliveryMessages(filterStr, null).get(null); return value == null ? 
0 : value; } @Override public String countDeliveringMessages(final String filterStr, final String groupByProperty) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.countDeliveringMessages(queue, filterStr, groupByProperty); } return JsonUtil.toJsonObject(internalCountDeliveryMessages(filterStr, groupByProperty)).toString(); } private Map<String, Long> internalCountDeliveryMessages(final String filterStr, final String groupByPropertyStr) throws Exception { checkStarted(); clearIO(); Map<String, Long> result = new HashMap<>(); try { Filter filter = FilterImpl.createFilter(filterStr); SimpleString groupByProperty = SimpleString.toSimpleString(groupByPropertyStr); if (filter == null && groupByProperty == null) { result.put(null, Long.valueOf(getDeliveringCount())); } else { Map<String, List<MessageReference>> deliveringMessages = queue.getDeliveringMessages(); deliveringMessages.forEach((s, messageReferenceList) -> messageReferenceList.forEach(messageReference -> internalComputeMessage(result, filter, groupByProperty, messageReference.getMessage()) )); } return result; } finally { blockOnIO(); } } private void internalComputeMessage(Map<String, Long> result, Filter filter, SimpleString groupByProperty, Message message) { if (filter == null || filter.match(message)) { if (groupByProperty == null) { result.compute(null, (k, v) -> v == null ? 1 : ++v); } else { Object value = message.getObjectProperty(groupByProperty); String valueStr = value == null ? null : value.toString(); result.compute(valueStr, (k, v) -> v == null ? 
1 : ++v); } } } @Override public boolean removeMessage(final long messageID) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.removeMessage(queue, messageID); } checkStarted(); clearIO(); try { return queue.deleteReference(messageID); } catch (ActiveMQException e) { throw new IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public int removeMessages(final String filterStr) throws Exception { return removeMessages(FLUSH_LIMIT, filterStr); } @Override public int removeMessages(final int flushLimit, final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.removeMessages(queue, flushLimit, filterStr); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); int removed = 0; try { removed = queue.deleteMatchingReferences(flushLimit, filter); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.removeMessagesSuccess(removed, queue.getName().toString()); } } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.removeMessagesFailure(queue.getName().toString()); } throw e; } return removed; } finally { blockOnIO(); } } @Override public int removeAllMessages() throws Exception { return removeMessages(FLUSH_LIMIT, null); } @Override public boolean expireMessage(final long messageID) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.expireMessage(queue, messageID); } checkStarted(); clearIO(); try { return queue.expireReference(messageID); } finally { blockOnIO(); } } @Override public int expireMessages(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.expireMessages(queue, filterStr); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); return queue.expireReferences(filter); } catch (ActiveMQException e) { throw new IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public boolean 
retryMessage(final long messageID) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.retryMessage(queue, messageID); } checkStarted(); clearIO(); try { Filter singleMessageFilter = new Filter() { @Override public boolean match(Message message) { return message.getMessageID() == messageID; } @Override public boolean match(Map<String, String> map) { return false; } @Override public boolean match(Filterable filterable) { return false; } @Override public SimpleString getFilterString() { return new SimpleString("custom filter for MESSAGEID= messageID"); } }; return queue.retryMessages(singleMessageFilter) > 0; } finally { blockOnIO(); } } @Override public int retryMessages() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.retryMessages(queue); } checkStarted(); clearIO(); try { return queue.retryMessages(null); } finally { blockOnIO(); } } @Override public boolean moveMessage(final long messageID, final String otherQueueName) throws Exception { return moveMessage(messageID, otherQueueName, false); } @Override public boolean moveMessage(final long messageID, final String otherQueueName, final boolean rejectDuplicates) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.moveMessage(queue, messageID, otherQueueName, rejectDuplicates); } checkStarted(); clearIO(); try { Binding binding = server.getPostOffice().getBinding(new SimpleString(otherQueueName)); if (binding == null) { throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName); } return queue.moveReference(messageID, binding.getAddress(), binding, rejectDuplicates); } finally { blockOnIO(); } } @Override public int moveMessages(final String filterStr, final String otherQueueName) throws Exception { return moveMessages(filterStr, otherQueueName, false); } @Override public int moveMessages(final int flushLimit, final String filterStr, final String otherQueueName, final boolean rejectDuplicates) throws Exception { return moveMessages(flushLimit, 
filterStr, otherQueueName, rejectDuplicates, -1); } @Override public int moveMessages(final int flushLimit, final String filterStr, final String otherQueueName, final boolean rejectDuplicates, final int messageCount) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.moveMessages(queue, flushLimit, filterStr, otherQueueName, rejectDuplicates, messageCount); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); Binding binding = server.getPostOffice().getBinding(new SimpleString(otherQueueName)); if (binding == null) { throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName); } int retValue = queue.moveReferences(flushLimit, filter, binding.getAddress(), rejectDuplicates, messageCount, binding); return retValue; } finally { blockOnIO(); } } @Override public int moveMessages(final String filterStr, final String otherQueueName, final boolean rejectDuplicates) throws Exception { return moveMessages(FLUSH_LIMIT, filterStr, otherQueueName, rejectDuplicates); } @Override public int sendMessagesToDeadLetterAddress(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.sendMessagesToDeadLetterAddress(queue, filterStr); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); return queue.sendMessagesToDeadLetterAddress(filter); } finally { blockOnIO(); } } @Override public String sendMessage(final Map<String, String> headers, final int type, final String body, boolean durable, final String user, final String password) throws Exception { return sendMessage(headers, type, body, durable, user, password, false); } @Override public String sendMessage(final Map<String, String> headers, final int type, final String body, boolean durable, final String user, final String password, boolean createMessageId) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.sendMessageThroughManagement(queue, headers, type, body, durable, user, 
"****"); } try { String s = sendMessage(queue.getAddress(), server, headers, type, body, durable, user, password, createMessageId, queue.getID()); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.sendMessageSuccess(queue.getName().toString(), user); } return s; } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.sendMessageFailure(queue.getName().toString(), user); } throw new IllegalStateException(e.getMessage()); } } @Override public boolean sendMessageToDeadLetterAddress(final long messageID) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.sendMessageToDeadLetterAddress(queue, messageID); } checkStarted(); clearIO(); try { return queue.sendMessageToDeadLetterAddress(messageID); } finally { blockOnIO(); } } @Override public int changeMessagesPriority(final String filterStr, final int newPriority) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.changeMessagesPriority(queue, filterStr, newPriority); } checkStarted(); clearIO(); try { if (newPriority < 0 || newPriority > 9) { throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority); } Filter filter = FilterImpl.createFilter(filterStr); return queue.changeReferencesPriority(filter, (byte) newPriority); } finally { blockOnIO(); } } @Override public boolean changeMessagePriority(final long messageID, final int newPriority) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.changeMessagePriority(queue, messageID, newPriority); } checkStarted(); clearIO(); try { if (newPriority < 0 || newPriority > 9) { throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority); } return queue.changeReferencePriority(messageID, (byte) newPriority); } finally { blockOnIO(); } } @Override public String listMessageCounter() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessageCounter(queue); } checkStarted(); clearIO(); try { return counter.toJSon(); } catch (Exception e) { throw new 
IllegalStateException(e); } finally { blockOnIO(); } } @Override public void resetMessageCounter() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessageCounter(queue); } checkStarted(); clearIO(); try { counter.resetCounter(); } finally { blockOnIO(); } } @Deprecated @Override public String listMessageCounterAsHTML() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessageCounterAsHTML(queue); } checkStarted(); clearIO(); try { return MessageCounterHelper.listMessageCounterAsHTML(new MessageCounter[]{counter}); } finally { blockOnIO(); } } @Override public String listMessageCounterHistory() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessageCounterHistory(queue); } checkStarted(); clearIO(); try { return MessageCounterHelper.listMessageCounterHistory(counter); } finally { blockOnIO(); } } @Deprecated @Override public String listMessageCounterHistoryAsHTML() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessageCounterHistoryAsHTML(queue); } checkStarted(); clearIO(); try { return MessageCounterHelper.listMessageCounterHistoryAsHTML(new MessageCounter[]{counter}); } finally { blockOnIO(); } } @Override public void pause() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.pause(queue); } checkStarted(); clearIO(); try { try { queue.pause(); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.pauseQueueSuccess(queue.getName().toString()); } } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.pauseQueueFailure(queue.getName().toString()); } } } finally { blockOnIO(); } } @Override public void pause(boolean persist) { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.pause(queue, persist); } checkStarted(); clearIO(); try { try { queue.pause(persist); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.pauseQueueSuccess(queue.getName().toString()); } } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { 
AuditLogger.pauseQueueFailure(queue.getName().toString()); } } } finally { blockOnIO(); } } @Override public void resume() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resume(queue); } checkStarted(); clearIO(); try { try { queue.resume(); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.resumeQueueSuccess(queue.getName().toString()); } } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.resumeQueueFailure(queue.getName().toString()); } e.printStackTrace(); } } finally { blockOnIO(); } } @Override public boolean isPaused() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isPaused(queue); } checkStarted(); clearIO(); try { return queue.isPaused(); } finally { blockOnIO(); } } @Override public CompositeData[] browse(int page, int pageSize) throws Exception { return browse(page, pageSize, null); } @Override public CompositeData[] browse(int page, int pageSize, String filter) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.browse(queue, page, pageSize); } checkStarted(); clearIO(); try { long index = 0; long start = (long) (page - 1) * pageSize; long end = Math.min(page * pageSize, queue.getMessageCount()); ArrayList<CompositeData> c = new ArrayList<>(); Filter thefilter = FilterImpl.createFilter(filter); final int attributeSizeLimit = addressSettingsRepository.getMatch(address).getManagementMessageAttributeSizeLimit(); try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) { try { while (iterator.hasNext() && index < end) { MessageReference ref = iterator.next(); if (thefilter == null || thefilter.match(ref.getMessage())) { if (index >= start) { c.add(ref.getMessage().toCompositeData(attributeSizeLimit, ref.getDeliveryCount())); } //we only increase the index if we add a message, otherwise we could stop before we get to a filtered message index++; } } } catch (NoSuchElementException ignored) { // this could happen through paging browsing } 
CompositeData[] rc = new CompositeData[c.size()]; c.toArray(rc); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.browseMessagesSuccess(queue.getName().toString(), c.size()); } return rc; } } catch (Exception e) { logger.warn(e.getMessage(), e); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.browseMessagesFailure(queue.getName().toString()); } throw new IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public CompositeData[] browse() throws Exception { return browse(null); } @Override public CompositeData[] browse(String filter) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.browse(queue, filter); } checkStarted(); clearIO(); try { final AddressSettings addressSettings = addressSettingsRepository.getMatch(address); final int attributeSizeLimit = addressSettings.getManagementMessageAttributeSizeLimit(); final int limit = addressSettings.getManagementBrowsePageSize(); int currentPageSize = 0; ArrayList<CompositeData> c = new ArrayList<>(); Filter thefilter = FilterImpl.createFilter(filter); try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) { try { while (iterator.hasNext() && currentPageSize++ < limit) { MessageReference ref = iterator.next(); if (thefilter == null || thefilter.match(ref.getMessage())) { c.add(ref.getMessage().toCompositeData(attributeSizeLimit, ref.getDeliveryCount())); } } } catch (NoSuchElementException ignored) { // this could happen through paging browsing } CompositeData[] rc = new CompositeData[c.size()]; c.toArray(rc); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.browseMessagesSuccess(queue.getName().toString(), currentPageSize); } return rc; } } catch (ActiveMQException e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.browseMessagesFailure(queue.getName().toString()); } throw new IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public void flushExecutor() { if 
(AuditLogger.isBaseLoggingEnabled()) { AuditLogger.flushExecutor(queue); } checkStarted(); clearIO(); try { queue.flushExecutor(); } finally { blockOnIO(); } } @Override public void resetAllGroups() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetAllGroups(queue); } checkStarted(); clearIO(); try { queue.resetAllGroups(); } finally { blockOnIO(); } } @Override public void resetGroup(String groupID) { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetGroup(queue, groupID); } checkStarted(); clearIO(); try { queue.resetGroup(SimpleString.toSimpleString(groupID)); } finally { blockOnIO(); } } @Override public int getGroupCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getGroupCount(queue); } checkStarted(); clearIO(); try { return queue.getGroupCount(); } finally { blockOnIO(); } } @Override public String listGroupsAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listGroupsAsJSON(queue); } checkStarted(); clearIO(); try { Map<SimpleString, Consumer> groups = queue.getGroups(); JsonArrayBuilder jsonArray = JsonLoader.createArrayBuilder(); for (Map.Entry<SimpleString, Consumer> group : groups.entrySet()) { if (group.getValue() instanceof ServerConsumer) { ServerConsumer serverConsumer = (ServerConsumer) group.getValue(); JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("groupID", group.getKey().toString()).add("consumerID", serverConsumer.getID()).add("connectionID", serverConsumer.getConnectionID().toString()).add("sessionID", serverConsumer.getSessionID()).add("browseOnly", serverConsumer.isBrowseOnly()).add("creationTime", serverConsumer.getCreationTime()); jsonArray.add(obj); } } return jsonArray.build().toString(); } finally { blockOnIO(); } } @Override public long getRingSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getRingSize(queue); } checkStarted(); clearIO(); try { return queue.getRingSize(); } finally { blockOnIO(); } } @Override public String 
listConsumersAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listConsumersAsJSON(queue); } checkStarted(); clearIO(); try { Collection<Consumer> consumers = queue.getConsumers(); JsonArrayBuilder jsonArray = JsonLoader.createArrayBuilder(); for (Consumer consumer : consumers) { if (consumer instanceof ServerConsumer) { ServerConsumer serverConsumer = (ServerConsumer) consumer; JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("consumerID", serverConsumer.getID()).add("connectionID", serverConsumer.getConnectionID().toString()).add("sessionID", serverConsumer.getSessionID()).add("browseOnly", serverConsumer.isBrowseOnly()).add("creationTime", serverConsumer.getCreationTime()); jsonArray.add(obj); } } return jsonArray.build().toString(); } finally { blockOnIO(); } } @Override protected MBeanOperationInfo[] fillMBeanOperationInfo() { return MBeanInfoHelper.getMBeanOperationsInfo(QueueControl.class); } @Override protected MBeanAttributeInfo[] fillMBeanAttributeInfo() { return MBeanInfoHelper.getMBeanAttributesInfo(QueueControl.class); } @Override public void resetMessagesAdded() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessagesAdded(queue); } checkStarted(); clearIO(); try { queue.resetMessagesAdded(); } finally { blockOnIO(); } } @Override public void resetMessagesAcknowledged() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessagesAcknowledged(queue); } checkStarted(); clearIO(); try { queue.resetMessagesAcknowledged(); } finally { blockOnIO(); } } @Override public void resetMessagesExpired() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessagesExpired(queue); } checkStarted(); clearIO(); try { queue.resetMessagesExpired(); } finally { blockOnIO(); } } @Override public void resetMessagesKilled() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessagesKilled(queue); } checkStarted(); clearIO(); 
try { queue.resetMessagesKilled(); } finally { blockOnIO(); } } @Override public boolean isGroupRebalance() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isGroupRebalance(queue); } checkStarted(); clearIO(); try { return queue.isGroupRebalance(); } finally { blockOnIO(); } } @Override public boolean isGroupRebalancePauseDispatch() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isGroupRebalancePauseDispatch(queue); } checkStarted(); clearIO(); try { return queue.isGroupRebalancePauseDispatch(); } finally { blockOnIO(); } } @Override public int getGroupBuckets() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getGroupBuckets(queue); } checkStarted(); clearIO(); try { return queue.getGroupBuckets(); } finally { blockOnIO(); } } @Override public String getGroupFirstKey() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getGroupFirstKey(queue); } checkStarted(); clearIO(); try { SimpleString groupFirstKey = queue.getGroupFirstKey(); return groupFirstKey != null ? 
groupFirstKey.toString() : null; } finally { blockOnIO(); } } @Override public int getPreparedTransactionMessageCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getPreparedTransactionMessageCount(queue); } checkStarted(); clearIO(); try { int count = 0; ResourceManager resourceManager = server.getResourceManager(); if (resourceManager != null) { List<Xid> preparedTransactions = resourceManager.getPreparedTransactions(); for (Xid preparedTransaction : preparedTransactions) { Transaction transaction = resourceManager.getTransaction(preparedTransaction); if (transaction != null) { List<TransactionOperation> allOperations = transaction.getAllOperations(); for (TransactionOperation operation : allOperations) { if (operation instanceof RefsOperation) { RefsOperation refsOperation = (RefsOperation) operation; List<MessageReference> references = refsOperation.getReferencesToAcknowledge(); for (MessageReference reference : references) { if (reference != null && reference.getQueue().getName().equals(queue.getName())) { count++; } } } } } } } return count; } finally { blockOnIO(); } } @Override public void deliverScheduledMessages(String filter) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.deliverScheduledMessage(queue, filter); } checkStarted(); clearIO(); try { queue.deliverScheduledMessages(filter); } finally { blockOnIO(); } } @Override public void deliverScheduledMessage(long messageId) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.deliverScheduledMessage(queue, messageId); } checkStarted(); clearIO(); try { queue.deliverScheduledMessage(messageId); } finally { blockOnIO(); } } @Override public boolean isAutoDelete() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isAutoDelete(queue); } checkStarted(); clearIO(); try { return queue.isAutoDelete(); } finally { blockOnIO(); } } private void checkStarted() { if (!server.getPostOffice().isStarted()) { throw new IllegalStateException("Broker is not 
started. Queue can not be managed yet"); } } }
artemis-server/src/main/java/org/apache/activemq/artemis/core/management/impl/QueueControlImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.management.impl; import javax.management.MBeanAttributeInfo; import javax.management.MBeanOperationInfo; import javax.management.openmbean.CompositeData; import javax.transaction.xa.Xid; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.JsonUtil; import org.apache.activemq.artemis.api.core.Message; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.management.QueueControl; import org.apache.activemq.artemis.api.core.management.ResourceNames; import org.apache.activemq.artemis.core.filter.Filter; import org.apache.activemq.artemis.core.filter.impl.FilterImpl; import org.apache.activemq.artemis.core.messagecounter.MessageCounter; import org.apache.activemq.artemis.core.messagecounter.impl.MessageCounterHelper; import org.apache.activemq.artemis.core.persistence.StorageManager; import org.apache.activemq.artemis.core.postoffice.Binding; import 
org.apache.activemq.artemis.core.security.SecurityStore; import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.server.Consumer; import org.apache.activemq.artemis.core.server.MessageReference; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.ServerConsumer; import org.apache.activemq.artemis.core.server.impl.RefsOperation; import org.apache.activemq.artemis.core.settings.HierarchicalRepository; import org.apache.activemq.artemis.core.settings.impl.AddressSettings; import org.apache.activemq.artemis.core.transaction.ResourceManager; import org.apache.activemq.artemis.core.transaction.Transaction; import org.apache.activemq.artemis.core.transaction.TransactionOperation; import org.apache.activemq.artemis.json.JsonArray; import org.apache.activemq.artemis.json.JsonArrayBuilder; import org.apache.activemq.artemis.json.JsonObjectBuilder; import org.apache.activemq.artemis.logs.AuditLogger; import org.apache.activemq.artemis.selector.filter.Filterable; import org.apache.activemq.artemis.utils.JsonLoader; import org.apache.activemq.artemis.utils.collections.LinkedListIterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.invoke.MethodHandles; public class QueueControlImpl extends AbstractControl implements QueueControl { private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public static final int FLUSH_LIMIT = 500; private final Queue queue; private final String address; private final ActiveMQServer server; private final StorageManager storageManager; private final SecurityStore securityStore; private final HierarchicalRepository<AddressSettings> addressSettingsRepository; private MessageCounter counter; private static String toJSON(final Map<String, Object>[] messages) { JsonArray array = toJSONMsgArray(messages); return 
array.toString(); } private static JsonArray toJSONMsgArray(final Map<String, Object>[] messages) { JsonArrayBuilder array = JsonLoader.createArrayBuilder(); for (Map<String, Object> message : messages) { array.add(JsonUtil.toJsonObject(message)); } return array.build(); } private static String toJSON(final Map<String, Map<String, Object>[]> messages) { JsonArrayBuilder arrayReturn = JsonLoader.createArrayBuilder(); for (Map.Entry<String, Map<String, Object>[]> entry : messages.entrySet()) { JsonObjectBuilder objectItem = JsonLoader.createObjectBuilder(); objectItem.add("consumerName", entry.getKey()); objectItem.add("elements", toJSONMsgArray(entry.getValue())); arrayReturn.add(objectItem); } return arrayReturn.build().toString(); } public QueueControlImpl(final Queue queue, final String address, final ActiveMQServer server, final StorageManager storageManager, final SecurityStore securityStore, final HierarchicalRepository<AddressSettings> addressSettingsRepository) throws Exception { super(QueueControl.class, storageManager); this.queue = queue; this.address = address; this.server = server; this.storageManager = storageManager; this.securityStore = securityStore; this.addressSettingsRepository = addressSettingsRepository; } public void setMessageCounter(final MessageCounter counter) { this.counter = counter; } // QueueControlMBean implementation ------------------------------ @Override public String getName() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getName(queue); } clearIO(); try { return queue.getName().toString(); } finally { blockOnIO(); } } @Override public String getAddress() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getAddress(queue); } checkStarted(); return address; } @Override public String getFilter() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFilter(queue); } checkStarted(); clearIO(); try { Filter filter = queue.getFilter(); return filter != null ? 
filter.getFilterString().toString() : null; } finally { blockOnIO(); } } @Override public boolean isDurable() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isDurable(queue); } checkStarted(); clearIO(); try { return queue.isDurable(); } finally { blockOnIO(); } } @Override public String getUser() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getUser(queue); } checkStarted(); clearIO(); try { SimpleString user = queue.getUser(); return user == null ? null : user.toString(); } finally { blockOnIO(); } } @Override public String getRoutingType() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getRoutingType(queue); } checkStarted(); clearIO(); try { return queue.getRoutingType().toString(); } finally { blockOnIO(); } } @Override public boolean isTemporary() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isTemporary(queue); } checkStarted(); clearIO(); try { return queue.isTemporary(); } finally { blockOnIO(); } } @Override public boolean isRetroactiveResource() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isRetroactiveResource(queue); } checkStarted(); clearIO(); try { return ResourceNames.isRetroactiveResource(server.getInternalNamingPrefix(), queue.getName()); } finally { blockOnIO(); } } @Override public long getMessageCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessageCount(queue); } checkStarted(); clearIO(); try { return queue.getMessageCount(); } finally { blockOnIO(); } } @Override public long getPersistentSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getPersistentSize(queue); } checkStarted(); clearIO(); try { return queue.getPersistentSize(); } finally { blockOnIO(); } } @Override public long getDurableMessageCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableMessageCount(queue); } checkStarted(); clearIO(); try { return queue.getDurableMessageCount(); } finally { blockOnIO(); } } @Override public long getDurablePersistentSize() { if 
(AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurablePersistSize(queue); } checkStarted(); clearIO(); try { return queue.getDurablePersistentSize(); } finally { blockOnIO(); } } @Override public int getConsumerCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getConsumerCount(queue); } checkStarted(); clearIO(); try { return queue.getConsumerCount(); } finally { blockOnIO(); } } @Override public int getDeliveringCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDeliveringCount(queue); } checkStarted(); clearIO(); try { return queue.getDeliveringCount(); } finally { blockOnIO(); } } @Override public long getDeliveringSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDeliveringSize(queue); } checkStarted(); clearIO(); try { return queue.getDeliveringSize(); } finally { blockOnIO(); } } @Override public int getDurableDeliveringCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableDeliveringCount(queue); } checkStarted(); clearIO(); try { return queue.getDurableDeliveringCount(); } finally { blockOnIO(); } } @Override public long getDurableDeliveringSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableDeliveringSize(queue); } checkStarted(); clearIO(); try { return queue.getDurableDeliveringSize(); } finally { blockOnIO(); } } @Override public long getMessagesAdded() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesAdded(queue); } checkStarted(); clearIO(); try { return queue.getMessagesAdded(); } finally { blockOnIO(); } } @Override public long getMessagesAcknowledged() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesAcknowledged(queue); } checkStarted(); clearIO(); try { return queue.getMessagesAcknowledged(); } finally { blockOnIO(); } } @Override public long getAcknowledgeAttempts() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesAcknowledged(queue); } checkStarted(); clearIO(); try { return 
queue.getAcknowledgeAttempts(); } finally { blockOnIO(); } } @Override public long getMessagesExpired() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesExpired(queue); } checkStarted(); clearIO(); try { return queue.getMessagesExpired(); } finally { blockOnIO(); } } @Override public long getMessagesKilled() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMessagesKilled(queue); } checkStarted(); clearIO(); try { return queue.getMessagesKilled(); } finally { blockOnIO(); } } @Override public long getID() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getID(queue); } checkStarted(); clearIO(); try { return queue.getID(); } finally { blockOnIO(); } } @Override public long getScheduledCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getScheduledCount(queue); } checkStarted(); clearIO(); try { return queue.getScheduledCount(); } finally { blockOnIO(); } } @Override public long getScheduledSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getScheduledSize(queue); } checkStarted(); clearIO(); try { return queue.getScheduledSize(); } finally { blockOnIO(); } } @Override public long getDurableScheduledCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableScheduledCount(queue); } checkStarted(); clearIO(); try { return queue.getDurableScheduledCount(); } finally { blockOnIO(); } } @Override public long getDurableScheduledSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDurableScheduledSize(queue); } checkStarted(); clearIO(); try { return queue.getDurableScheduledSize(); } finally { blockOnIO(); } } @Override public String getDeadLetterAddress() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getDeadLetterAddress(queue); } checkStarted(); clearIO(); try { return queue.getDeadLetterAddress() == null ? 
null : queue.getDeadLetterAddress().toString(); } finally { blockOnIO(); } } @Override public String getExpiryAddress() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getExpiryAddress(queue); } checkStarted(); clearIO(); try { return queue.getExpiryAddress() == null ? null : queue.getExpiryAddress().toString(); } finally { blockOnIO(); } } @Override public int getMaxConsumers() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getMaxConsumers(queue); } checkStarted(); clearIO(); try { return queue.getMaxConsumers(); } finally { blockOnIO(); } } @Override public boolean isPurgeOnNoConsumers() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isPurgeOnNoConsumers(queue); } checkStarted(); clearIO(); try { return queue.isPurgeOnNoConsumers(); } finally { blockOnIO(); } } @Override public void disable() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.disable(queue); } checkStarted(); clearIO(); try { server.getPostOffice().updateQueue(queue.getQueueConfiguration().setEnabled(false)); } finally { blockOnIO(); } } @Override public void enable() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.enable(queue); } checkStarted(); clearIO(); try { server.getPostOffice().updateQueue(queue.getQueueConfiguration().setEnabled(true)); } finally { blockOnIO(); } } @Override public boolean isEnabled() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isEnabled(queue); } checkStarted(); clearIO(); try { return queue.isEnabled(); } finally { blockOnIO(); } } @Override public boolean isConfigurationManaged() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isConfigurationManaged(queue); } checkStarted(); clearIO(); try { return queue.isConfigurationManaged(); } finally { blockOnIO(); } } @Override public boolean isExclusive() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isExclusive(queue); } checkStarted(); clearIO(); try { return queue.isExclusive(); } finally { blockOnIO(); } } @Override 
public boolean isLastValue() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isLastValue(queue); } checkStarted(); clearIO(); try { return queue.isLastValue(); } finally { blockOnIO(); } } @Override public String getLastValueKey() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.lastValueKey(queue); } checkStarted(); clearIO(); try { if (queue.getLastValueKey() != null) { return queue.getLastValueKey().toString(); } else { return null; } } finally { blockOnIO(); } } @Override public int getConsumersBeforeDispatch() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.consumersBeforeDispatch(queue); } checkStarted(); clearIO(); try { return queue.getConsumersBeforeDispatch(); } finally { blockOnIO(); } } @Override public long getDelayBeforeDispatch() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.delayBeforeDispatch(queue); } checkStarted(); clearIO(); try { return queue.getDelayBeforeDispatch(); } finally { blockOnIO(); } } @Override public Map<String, Object>[] listScheduledMessages() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listScheduledMessages(queue); } checkStarted(); clearIO(); try { List<MessageReference> refs = queue.getScheduledMessages(); return convertMessagesToMaps(refs); } finally { blockOnIO(); } } @Override public String listScheduledMessagesAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listScheduledMessagesAsJSON(queue); } checkStarted(); clearIO(); try { return QueueControlImpl.toJSON(listScheduledMessages()); } finally { blockOnIO(); } } /** * @param refs * @return */ private Map<String, Object>[] convertMessagesToMaps(List<MessageReference> refs) throws ActiveMQException { final int attributeSizeLimit = addressSettingsRepository.getMatch(address).getManagementMessageAttributeSizeLimit(); Map<String, Object>[] messages = new Map[refs.size()]; int i = 0; for (MessageReference ref : refs) { Message message = ref.getMessage(); messages[i++] = 
message.toMap(attributeSizeLimit); } return messages; } @Override public Map<String, Map<String, Object>[]> listDeliveringMessages() throws ActiveMQException { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listDeliveringMessages(queue); } checkStarted(); clearIO(); try { Map<String, List<MessageReference>> msgs = queue.getDeliveringMessages(); Map<String, Map<String, Object>[]> msgRet = new HashMap<>(); for (Map.Entry<String, List<MessageReference>> entry : msgs.entrySet()) { msgRet.put(entry.getKey(), convertMessagesToMaps(entry.getValue())); } return msgRet; } finally { blockOnIO(); } } @Override public String listDeliveringMessagesAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listDeliveringMessagesAsJSON(queue); } checkStarted(); clearIO(); try { return QueueControlImpl.toJSON(listDeliveringMessages()); } finally { blockOnIO(); } } @Override public Map<String, Object>[] listMessages(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessages(queue, filterStr); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); List<Map<String, Object>> messages = new ArrayList<>(); queue.flushExecutor(); final AddressSettings addressSettings = addressSettingsRepository.getMatch(address); final int attributeSizeLimit = addressSettings.getManagementMessageAttributeSizeLimit(); final int limit = addressSettings.getManagementBrowsePageSize(); int count = 0; try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) { try { while (iterator.hasNext() && count++ < limit) { MessageReference ref = iterator.next(); if (filter == null || filter.match(ref.getMessage())) { Message message = ref.getMessage(); messages.add(message.toMap(attributeSizeLimit)); } } } catch (NoSuchElementException ignored) { // this could happen through paging browsing } return messages.toArray(new Map[messages.size()]); } } catch (ActiveMQException e) { throw new 
IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public String listMessagesAsJSON(final String filter) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessagesAsJSON(queue); } checkStarted(); clearIO(); try { return QueueControlImpl.toJSON(listMessages(filter)); } finally { blockOnIO(); } } /** * this method returns a Map representing the first message. * or null if there's no first message. * @return * @throws Exception */ protected Map<String, Object> getFirstMessage() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFirstMessage(queue); } checkStarted(); clearIO(); try { List<Map<String, Object>> messages = new ArrayList<>(); final int attributeSizeLimit = addressSettingsRepository.getMatch(address).getManagementMessageAttributeSizeLimit(); MessageReference firstMessage = queue.peekFirstMessage(); if (firstMessage != null) { return firstMessage.getMessage().toMap(attributeSizeLimit); } else { return null; } } finally { blockOnIO(); } } @Override public String getFirstMessageAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFirstMessageAsJSON(queue); } Map<String, Object> message = getFirstMessage(); return toJSON(message == null ? 
new Map[0] : new Map[]{message}); } @Override public Long getFirstMessageTimestamp() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFirstMessageTimestamp(queue); } Map<String, Object> message = getFirstMessage(); if (message == null) { return null; } else { if (!message.containsKey("timestamp")) { return null; } else { return (Long) message.get("timestamp"); } } } @Override public Long getFirstMessageAge() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getFirstMessageAge(queue); } Long firstMessageTimestamp = getFirstMessageTimestamp(); if (firstMessageTimestamp == null) { return null; } long now = new Date().getTime(); return now - firstMessageTimestamp.longValue(); } @Override public long countMessages() throws Exception { return countMessages(null); } @Override public long countMessages(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.countMessages(queue, filterStr); } Long value = internalCountMessages(filterStr, null).get(null); return value == null ? 
0 : value; } @Override public String countMessages(final String filterStr, final String groupByProperty) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.countMessages(queue, filterStr, groupByProperty); } return JsonUtil.toJsonObject(internalCountMessages(filterStr, groupByProperty)).toString(); } private Map<String, Long> internalCountMessages(final String filterStr, final String groupByPropertyStr) throws Exception { checkStarted(); clearIO(); Map<String, Long> result = new HashMap<>(); try { Filter filter = FilterImpl.createFilter(filterStr); SimpleString groupByProperty = SimpleString.toSimpleString(groupByPropertyStr); if (filter == null && groupByProperty == null) { result.put(null, getMessageCount()); } else { final int limit = addressSettingsRepository.getMatch(address).getManagementBrowsePageSize(); int count = 0; try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) { try { while (iterator.hasNext() && count++ < limit) { Message message = iterator.next().getMessage(); internalComputeMessage(result, filter, groupByProperty, message); } } catch (NoSuchElementException ignored) { // this could happen through paging browsing } } } return result; } finally { blockOnIO(); } } @Override public long countDeliveringMessages(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.countDeliveringMessages(queue, filterStr); } Long value = internalCountDeliveryMessages(filterStr, null).get(null); return value == null ? 
0 : value; } @Override public String countDeliveringMessages(final String filterStr, final String groupByProperty) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.countDeliveringMessages(queue, filterStr, groupByProperty); } return JsonUtil.toJsonObject(internalCountDeliveryMessages(filterStr, groupByProperty)).toString(); } private Map<String, Long> internalCountDeliveryMessages(final String filterStr, final String groupByPropertyStr) throws Exception { checkStarted(); clearIO(); Map<String, Long> result = new HashMap<>(); try { Filter filter = FilterImpl.createFilter(filterStr); SimpleString groupByProperty = SimpleString.toSimpleString(groupByPropertyStr); if (filter == null && groupByProperty == null) { result.put(null, Long.valueOf(getDeliveringCount())); } else { Map<String, List<MessageReference>> deliveringMessages = queue.getDeliveringMessages(); deliveringMessages.forEach((s, messageReferenceList) -> messageReferenceList.forEach(messageReference -> internalComputeMessage(result, filter, groupByProperty, messageReference.getMessage()) )); } return result; } finally { blockOnIO(); } } private void internalComputeMessage(Map<String, Long> result, Filter filter, SimpleString groupByProperty, Message message) { if (filter == null || filter.match(message)) { if (groupByProperty == null) { result.compute(null, (k, v) -> v == null ? 1 : ++v); } else { Object value = message.getObjectProperty(groupByProperty); String valueStr = value == null ? null : value.toString(); result.compute(valueStr, (k, v) -> v == null ? 
1 : ++v); } } } @Override public boolean removeMessage(final long messageID) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.removeMessage(queue, messageID); } checkStarted(); clearIO(); try { return queue.deleteReference(messageID); } catch (ActiveMQException e) { throw new IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public int removeMessages(final String filterStr) throws Exception { return removeMessages(FLUSH_LIMIT, filterStr); } @Override public int removeMessages(final int flushLimit, final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.removeMessages(queue, flushLimit, filterStr); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); int removed = 0; try { removed = queue.deleteMatchingReferences(flushLimit, filter); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.removeMessagesSuccess(removed, queue.getName().toString()); } } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.removeMessagesFailure(queue.getName().toString()); } throw e; } return removed; } finally { blockOnIO(); } } @Override public int removeAllMessages() throws Exception { return removeMessages(FLUSH_LIMIT, null); } @Override public boolean expireMessage(final long messageID) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.expireMessage(queue, messageID); } checkStarted(); clearIO(); try { return queue.expireReference(messageID); } finally { blockOnIO(); } } @Override public int expireMessages(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.expireMessages(queue, filterStr); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); return queue.expireReferences(filter); } catch (ActiveMQException e) { throw new IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public boolean 
retryMessage(final long messageID) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.retryMessage(queue, messageID); } checkStarted(); clearIO(); try { Filter singleMessageFilter = new Filter() { @Override public boolean match(Message message) { return message.getMessageID() == messageID; } @Override public boolean match(Map<String, String> map) { return false; } @Override public boolean match(Filterable filterable) { return false; } @Override public SimpleString getFilterString() { return new SimpleString("custom filter for MESSAGEID= messageID"); } }; return queue.retryMessages(singleMessageFilter) > 0; } finally { blockOnIO(); } } @Override public int retryMessages() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.retryMessages(queue); } checkStarted(); clearIO(); try { return queue.retryMessages(null); } finally { blockOnIO(); } } @Override public boolean moveMessage(final long messageID, final String otherQueueName) throws Exception { return moveMessage(messageID, otherQueueName, false); } @Override public boolean moveMessage(final long messageID, final String otherQueueName, final boolean rejectDuplicates) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.moveMessage(queue, messageID, otherQueueName, rejectDuplicates); } checkStarted(); clearIO(); try { Binding binding = server.getPostOffice().getBinding(new SimpleString(otherQueueName)); if (binding == null) { throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName); } return queue.moveReference(messageID, binding.getAddress(), binding, rejectDuplicates); } finally { blockOnIO(); } } @Override public int moveMessages(final String filterStr, final String otherQueueName) throws Exception { return moveMessages(filterStr, otherQueueName, false); } @Override public int moveMessages(final int flushLimit, final String filterStr, final String otherQueueName, final boolean rejectDuplicates) throws Exception { return moveMessages(flushLimit, 
filterStr, otherQueueName, rejectDuplicates, -1); } @Override public int moveMessages(final int flushLimit, final String filterStr, final String otherQueueName, final boolean rejectDuplicates, final int messageCount) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.moveMessages(queue, flushLimit, filterStr, otherQueueName, rejectDuplicates, messageCount); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); Binding binding = server.getPostOffice().getBinding(new SimpleString(otherQueueName)); if (binding == null) { throw ActiveMQMessageBundle.BUNDLE.noQueueFound(otherQueueName); } int retValue = queue.moveReferences(flushLimit, filter, binding.getAddress(), rejectDuplicates, messageCount, binding); return retValue; } finally { blockOnIO(); } } @Override public int moveMessages(final String filterStr, final String otherQueueName, final boolean rejectDuplicates) throws Exception { return moveMessages(FLUSH_LIMIT, filterStr, otherQueueName, rejectDuplicates); } @Override public int sendMessagesToDeadLetterAddress(final String filterStr) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.sendMessagesToDeadLetterAddress(queue, filterStr); } checkStarted(); clearIO(); try { Filter filter = FilterImpl.createFilter(filterStr); return queue.sendMessagesToDeadLetterAddress(filter); } finally { blockOnIO(); } } @Override public String sendMessage(final Map<String, String> headers, final int type, final String body, boolean durable, final String user, final String password) throws Exception { return sendMessage(headers, type, body, durable, user, password, false); } @Override public String sendMessage(final Map<String, String> headers, final int type, final String body, boolean durable, final String user, final String password, boolean createMessageId) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.sendMessageThroughManagement(queue, headers, type, body, durable, user, 
"****"); } try { String s = sendMessage(queue.getAddress(), server, headers, type, body, durable, user, password, createMessageId, queue.getID()); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.sendMessageSuccess(queue.getName().toString(), user); } return s; } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.sendMessageFailure(queue.getName().toString(), user); } throw new IllegalStateException(e.getMessage()); } } @Override public boolean sendMessageToDeadLetterAddress(final long messageID) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.sendMessageToDeadLetterAddress(queue, messageID); } checkStarted(); clearIO(); try { return queue.sendMessageToDeadLetterAddress(messageID); } finally { blockOnIO(); } } @Override public int changeMessagesPriority(final String filterStr, final int newPriority) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.changeMessagesPriority(queue, filterStr, newPriority); } checkStarted(); clearIO(); try { if (newPriority < 0 || newPriority > 9) { throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority); } Filter filter = FilterImpl.createFilter(filterStr); return queue.changeReferencesPriority(filter, (byte) newPriority); } finally { blockOnIO(); } } @Override public boolean changeMessagePriority(final long messageID, final int newPriority) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.changeMessagePriority(queue, messageID, newPriority); } checkStarted(); clearIO(); try { if (newPriority < 0 || newPriority > 9) { throw ActiveMQMessageBundle.BUNDLE.invalidNewPriority(newPriority); } return queue.changeReferencePriority(messageID, (byte) newPriority); } finally { blockOnIO(); } } @Override public String listMessageCounter() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessageCounter(queue); } checkStarted(); clearIO(); try { return counter.toJSon(); } catch (Exception e) { throw new 
IllegalStateException(e); } finally { blockOnIO(); } } @Override public void resetMessageCounter() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessageCounter(queue); } checkStarted(); clearIO(); try { counter.resetCounter(); } finally { blockOnIO(); } } @Deprecated @Override public String listMessageCounterAsHTML() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessageCounterAsHTML(queue); } checkStarted(); clearIO(); try { return MessageCounterHelper.listMessageCounterAsHTML(new MessageCounter[]{counter}); } finally { blockOnIO(); } } @Override public String listMessageCounterHistory() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessageCounterHistory(queue); } checkStarted(); clearIO(); try { return MessageCounterHelper.listMessageCounterHistory(counter); } finally { blockOnIO(); } } @Deprecated @Override public String listMessageCounterHistoryAsHTML() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listMessageCounterHistoryAsHTML(queue); } checkStarted(); clearIO(); try { return MessageCounterHelper.listMessageCounterHistoryAsHTML(new MessageCounter[]{counter}); } finally { blockOnIO(); } } @Override public void pause() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.pause(queue); } checkStarted(); clearIO(); try { try { queue.pause(); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.pauseQueueSuccess(queue.getName().toString()); } } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.pauseQueueFailure(queue.getName().toString()); } } } finally { blockOnIO(); } } @Override public void pause(boolean persist) { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.pause(queue, persist); } checkStarted(); clearIO(); try { try { queue.pause(persist); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.pauseQueueSuccess(queue.getName().toString()); } } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { 
AuditLogger.pauseQueueFailure(queue.getName().toString()); } } } finally { blockOnIO(); } } @Override public void resume() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resume(queue); } checkStarted(); clearIO(); try { try { queue.resume(); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.resumeQueueSuccess(queue.getName().toString()); } } catch (Exception e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.resumeQueueFailure(queue.getName().toString()); } e.printStackTrace(); } } finally { blockOnIO(); } } @Override public boolean isPaused() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isPaused(queue); } checkStarted(); clearIO(); try { return queue.isPaused(); } finally { blockOnIO(); } } @Override public CompositeData[] browse(int page, int pageSize) throws Exception { return browse(page, pageSize, null); } @Override public CompositeData[] browse(int page, int pageSize, String filter) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.browse(queue, page, pageSize); } checkStarted(); clearIO(); try { long index = 0; long start = (long) (page - 1) * pageSize; long end = Math.min(page * pageSize, queue.getMessageCount()); ArrayList<CompositeData> c = new ArrayList<>(); Filter thefilter = FilterImpl.createFilter(filter); final int attributeSizeLimit = addressSettingsRepository.getMatch(address).getManagementMessageAttributeSizeLimit(); try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) { try { while (iterator.hasNext() && index < end) { MessageReference ref = iterator.next(); if (thefilter == null || thefilter.match(ref.getMessage())) { if (index >= start) { c.add(ref.getMessage().toCompositeData(attributeSizeLimit, ref.getDeliveryCount())); } //we only increase the index if we add a message, otherwise we could stop before we get to a filtered message index++; } } } catch (NoSuchElementException ignored) { // this could happen through paging browsing } 
CompositeData[] rc = new CompositeData[c.size()]; c.toArray(rc); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.browseMessagesSuccess(queue.getName().toString(), c.size()); } return rc; } } catch (Exception e) { logger.warn(e.getMessage(), e); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.browseMessagesFailure(queue.getName().toString()); } throw new IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public CompositeData[] browse() throws Exception { return browse(null); } @Override public CompositeData[] browse(String filter) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.browse(queue, filter); } checkStarted(); clearIO(); try { final AddressSettings addressSettings = addressSettingsRepository.getMatch(address); final int attributeSizeLimit = addressSettings.getManagementMessageAttributeSizeLimit(); final int limit = addressSettings.getManagementBrowsePageSize(); int currentPageSize = 0; ArrayList<CompositeData> c = new ArrayList<>(); Filter thefilter = FilterImpl.createFilter(filter); try (LinkedListIterator<MessageReference> iterator = queue.browserIterator()) { try { while (iterator.hasNext() && currentPageSize++ < limit) { MessageReference ref = iterator.next(); if (thefilter == null || thefilter.match(ref.getMessage())) { c.add(ref.getMessage().toCompositeData(attributeSizeLimit, ref.getDeliveryCount())); } } } catch (NoSuchElementException ignored) { // this could happen through paging browsing } CompositeData[] rc = new CompositeData[c.size()]; c.toArray(rc); if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.browseMessagesSuccess(queue.getName().toString(), currentPageSize); } return rc; } } catch (ActiveMQException e) { if (AuditLogger.isResourceLoggingEnabled()) { AuditLogger.browseMessagesFailure(queue.getName().toString()); } throw new IllegalStateException(e.getMessage()); } finally { blockOnIO(); } } @Override public void flushExecutor() { if 
(AuditLogger.isBaseLoggingEnabled()) { AuditLogger.flushExecutor(queue); } checkStarted(); clearIO(); try { queue.flushExecutor(); } finally { blockOnIO(); } } @Override public void resetAllGroups() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetAllGroups(queue); } checkStarted(); clearIO(); try { queue.resetAllGroups(); } finally { blockOnIO(); } } @Override public void resetGroup(String groupID) { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetGroup(queue, groupID); } checkStarted(); clearIO(); try { queue.resetGroup(SimpleString.toSimpleString(groupID)); } finally { blockOnIO(); } } @Override public int getGroupCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getGroupCount(queue); } checkStarted(); clearIO(); try { return queue.getGroupCount(); } finally { blockOnIO(); } } @Override public String listGroupsAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listGroupsAsJSON(queue); } checkStarted(); clearIO(); try { Map<SimpleString, Consumer> groups = queue.getGroups(); JsonArrayBuilder jsonArray = JsonLoader.createArrayBuilder(); for (Map.Entry<SimpleString, Consumer> group : groups.entrySet()) { if (group.getValue() instanceof ServerConsumer) { ServerConsumer serverConsumer = (ServerConsumer) group.getValue(); JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("groupID", group.getKey().toString()).add("consumerID", serverConsumer.getID()).add("connectionID", serverConsumer.getConnectionID().toString()).add("sessionID", serverConsumer.getSessionID()).add("browseOnly", serverConsumer.isBrowseOnly()).add("creationTime", serverConsumer.getCreationTime()); jsonArray.add(obj); } } return jsonArray.build().toString(); } finally { blockOnIO(); } } @Override public long getRingSize() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getRingSize(queue); } checkStarted(); clearIO(); try { return queue.getRingSize(); } finally { blockOnIO(); } } @Override public String 
listConsumersAsJSON() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.listConsumersAsJSON(queue); } checkStarted(); clearIO(); try { Collection<Consumer> consumers = queue.getConsumers(); JsonArrayBuilder jsonArray = JsonLoader.createArrayBuilder(); for (Consumer consumer : consumers) { if (consumer instanceof ServerConsumer) { ServerConsumer serverConsumer = (ServerConsumer) consumer; JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("consumerID", serverConsumer.getID()).add("connectionID", serverConsumer.getConnectionID().toString()).add("sessionID", serverConsumer.getSessionID()).add("browseOnly", serverConsumer.isBrowseOnly()).add("creationTime", serverConsumer.getCreationTime()); jsonArray.add(obj); } } return jsonArray.build().toString(); } finally { blockOnIO(); } } @Override protected MBeanOperationInfo[] fillMBeanOperationInfo() { return MBeanInfoHelper.getMBeanOperationsInfo(QueueControl.class); } @Override protected MBeanAttributeInfo[] fillMBeanAttributeInfo() { return MBeanInfoHelper.getMBeanAttributesInfo(QueueControl.class); } @Override public void resetMessagesAdded() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessagesAdded(queue); } checkStarted(); clearIO(); try { queue.resetMessagesAdded(); } finally { blockOnIO(); } } @Override public void resetMessagesAcknowledged() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessagesAcknowledged(queue); } checkStarted(); clearIO(); try { queue.resetMessagesAcknowledged(); } finally { blockOnIO(); } } @Override public void resetMessagesExpired() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessagesExpired(queue); } checkStarted(); clearIO(); try { queue.resetMessagesExpired(); } finally { blockOnIO(); } } @Override public void resetMessagesKilled() throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.resetMessagesKilled(queue); } checkStarted(); clearIO(); 
try { queue.resetMessagesKilled(); } finally { blockOnIO(); } } @Override public boolean isGroupRebalance() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isGroupRebalance(queue); } checkStarted(); clearIO(); try { return queue.isGroupRebalance(); } finally { blockOnIO(); } } @Override public boolean isGroupRebalancePauseDispatch() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isGroupRebalancePauseDispatch(queue); } checkStarted(); clearIO(); try { return queue.isGroupRebalancePauseDispatch(); } finally { blockOnIO(); } } @Override public int getGroupBuckets() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getGroupBuckets(queue); } checkStarted(); clearIO(); try { return queue.getGroupBuckets(); } finally { blockOnIO(); } } @Override public String getGroupFirstKey() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getGroupFirstKey(queue); } checkStarted(); clearIO(); try { SimpleString groupFirstKey = queue.getGroupFirstKey(); return groupFirstKey != null ? 
groupFirstKey.toString() : null; } finally { blockOnIO(); } } @Override public int getPreparedTransactionMessageCount() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.getPreparedTransactionMessageCount(queue); } checkStarted(); clearIO(); try { int count = 0; ResourceManager resourceManager = server.getResourceManager(); if (resourceManager != null) { List<Xid> preparedTransactions = resourceManager.getPreparedTransactions(); for (Xid preparedTransaction : preparedTransactions) { Transaction transaction = resourceManager.getTransaction(preparedTransaction); if (transaction != null) { List<TransactionOperation> allOperations = transaction.getAllOperations(); for (TransactionOperation operation : allOperations) { if (operation instanceof RefsOperation) { RefsOperation refsOperation = (RefsOperation) operation; List<MessageReference> references = refsOperation.getReferencesToAcknowledge(); for (MessageReference reference : references) { if (reference != null && reference.getQueue().getName().equals(queue.getName())) { count++; } } } } } } } return count; } finally { blockOnIO(); } } @Override public void deliverScheduledMessages(String filter) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.deliverScheduledMessage(queue, filter); } checkStarted(); clearIO(); try { queue.deliverScheduledMessages(filter); } finally { blockOnIO(); } } @Override public void deliverScheduledMessage(long messageId) throws Exception { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.deliverScheduledMessage(queue, messageId); } checkStarted(); clearIO(); try { queue.deliverScheduledMessage(messageId); } finally { blockOnIO(); } } @Override public boolean isAutoDelete() { if (AuditLogger.isBaseLoggingEnabled()) { AuditLogger.isAutoDelete(queue); } checkStarted(); clearIO(); try { return queue.isAutoDelete(); } finally { blockOnIO(); } } private void checkStarted() { if (!server.getPostOffice().isStarted()) { throw new IllegalStateException("Broker is not 
started. Queue can not be managed yet"); } } }
ARTEMIS-4056 Fixing compatibility with getFirstMessage() if Empty the previous method was returning new Map[1] and the JSON output would be slightly different and I am playing safe here just in case.
artemis-server/src/main/java/org/apache/activemq/artemis/core/management/impl/QueueControlImpl.java
ARTEMIS-4056 Fixing compatibility with getFirstMessage()
<ide><path>rtemis-server/src/main/java/org/apache/activemq/artemis/core/management/impl/QueueControlImpl.java <ide> <ide> clearIO(); <ide> try { <del> List<Map<String, Object>> messages = new ArrayList<>(); <ide> final int attributeSizeLimit = addressSettingsRepository.getMatch(address).getManagementMessageAttributeSizeLimit(); <ide> MessageReference firstMessage = queue.peekFirstMessage(); <ide> if (firstMessage != null) { <ide> AuditLogger.getFirstMessageAsJSON(queue); <ide> } <ide> Map<String, Object> message = getFirstMessage(); <del> return toJSON(message == null ? new Map[0] : new Map[]{message}); <add> // I"m returning a new Map[1] in case of no first message, because older versions used to return that when null <add> // and I'm playing safe with the compatibility here. <add> return toJSON(message == null ? new Map[1] : new Map[]{message}); <ide> } <ide> <ide> @Override
Java
mit
e228e20c7ffd73119e4496e749c068f80cc1f150
0
breadwallet/breadwallet-android,breadwallet/breadwallet-android,breadwallet/breadwallet-android,breadwallet/breadwallet-android
package com.platform; import com.breadwallet.BuildConfig; /** * Created by byfieldj on 3/26/18. */ public class JsonRpcConstants { // Ethereum rpc endpoint public static final String BRD_ETH_RPC_ENDPOINT = BuildConfig.BITCOIN_TESTNET ? "/ethq/ropsten/proxy" : "ethq/mainnet/proxy"; public static final String ETH_RPC_TX_LIST = BuildConfig.BITCOIN_TESTNET ? "https://ropsten.etherscan.io/api?module=account&action=txlist&address=%s" : "https://api.etherscan.io/api?module=account&action=txlist&address=%s"; }
app/src/main/java/com/platform/JsonRpcConstants.java
package com.platform;

/**
 * Created by byfieldj on 3/26/18.
 *
 * Endpoint constants for Ethereum JSON-RPC calls.
 *
 * NOTE(review): both constants are hard-coded to the Ropsten testnet, so
 * mainnet builds would also hit testnet services — confirm whether these
 * should be switched on a build flag.
 */
public class JsonRpcConstants {

    // Ethereum rpc endpoint (relative path, Ropsten proxy).
    public static final String BRD_ETH_RPC_ENDPOINT = "/ethq/ropsten/proxy";

    // Etherscan (Ropsten) transaction-list endpoint; %s is the account address.
    public static final String ETH_RPC_TX_LIST = "https://ropsten.etherscan.io/api?module=account&action=txlist&address=%s";
}
use testnet or mainnet links accordingly
app/src/main/java/com/platform/JsonRpcConstants.java
use testnet or mainnet links accordingly
<ide><path>pp/src/main/java/com/platform/JsonRpcConstants.java <ide> package com.platform; <add> <add>import com.breadwallet.BuildConfig; <ide> <ide> /** <ide> * Created by byfieldj on 3/26/18. <ide> */ <ide> <ide> public class JsonRpcConstants { <del> <del> <ide> // Ethereum rpc endpoint <del> public static final String BRD_ETH_RPC_ENDPOINT = "/ethq/ropsten/proxy"; <del> public static final String ETH_RPC_TX_LIST = "https://ropsten.etherscan.io/api?module=account&action=txlist&address=%s"; <del> <add> public static final String BRD_ETH_RPC_ENDPOINT = BuildConfig.BITCOIN_TESTNET ? "/ethq/ropsten/proxy" : "ethq/mainnet/proxy"; <add> public static final String ETH_RPC_TX_LIST = BuildConfig.BITCOIN_TESTNET ? <add> "https://ropsten.etherscan.io/api?module=account&action=txlist&address=%s" : <add> "https://api.etherscan.io/api?module=account&action=txlist&address=%s"; <ide> <ide> }
Java
apache-2.0
ad29c5a9020f8e81b8f0aaba85a4c84e57361d1d
0
EBIBioSamples/biosamples-v4,EBIBioSamples/biosamples-v4,EBIBioSamples/biosamples-v4,EBIBioSamples/biosamples-v4
package uk.ac.ebi.biosamples.service; import java.util.List; import java.util.Optional; import org.springframework.hateoas.Link; import org.springframework.hateoas.Resource; import org.springframework.hateoas.ResourceAssembler; import org.springframework.hateoas.mvc.ControllerLinkBuilder; import org.springframework.stereotype.Service; import org.springframework.web.util.UriComponents; import org.springframework.web.util.UriComponentsBuilder; import uk.ac.ebi.biosamples.controller.SampleCurationLinksRestController; import uk.ac.ebi.biosamples.controller.SampleRestController; import uk.ac.ebi.biosamples.model.Sample; /** * This class is used by Spring to add HAL _links for {@Link Sample} objects. * * @author faulcon * */ @Service public class SampleResourceAssembler implements ResourceAssembler<Sample, Resource<Sample>> { public static final String REL_CURATIONDOMAIN="curationDomain"; public static final String REL_CURATIONLINKS="curationLinks"; public static final String REL_CURATIONLINK="curationLink"; public SampleResourceAssembler() { } private Link getSelfLink(String accession, Optional<Boolean> legacydetails, Optional<List<String>> curationDomains) { UriComponentsBuilder uriComponentsBuilder = ControllerLinkBuilder.linkTo(SampleRestController.class, accession).toUriComponentsBuilder(); if (legacydetails.isPresent() && legacydetails.get()) { uriComponentsBuilder.queryParam("legacydetails", legacydetails); } if (curationDomains != null && curationDomains.isPresent()) { if (curationDomains.get().size() == 0) { uriComponentsBuilder.queryParam("curationdomain", (Object[])null); } else { for (String curationDomain : curationDomains.get()) { uriComponentsBuilder.queryParam("curationdomain", curationDomain); } } } return new Link(uriComponentsBuilder.build().toUriString(), Link.REL_SELF); } private Link getCurationDomainLink(Link selfLink) { UriComponents selfUriComponents = UriComponentsBuilder.fromUriString(selfLink.getHref()).build(); if 
(selfUriComponents.getQueryParams().size() == 0) { return new Link(selfLink.getHref()+"{?curationdomain}", REL_CURATIONDOMAIN); } else { return new Link(selfLink.getHref()+"{&curationdomain}", REL_CURATIONDOMAIN); } } private Link getCurationLinksLink(String accession) { return ControllerLinkBuilder.linkTo( ControllerLinkBuilder.methodOn(SampleCurationLinksRestController.class) .getCurationLinkPageJson(accession, null, null)).withRel("curationLinks"); } private Link getCurationLinkLink(String accession) { return ControllerLinkBuilder.linkTo( ControllerLinkBuilder.methodOn(SampleCurationLinksRestController.class) .getCurationLinkJson(accession, null)).withRel("curationLink"); } public Resource<Sample> toResource(Sample sample, Optional<Boolean> legacydetails, Optional<List<String>> curationDomains) { Resource<Sample> sampleResource = new Resource<>(sample); sampleResource.add(getSelfLink(sample.getAccession(), legacydetails, curationDomains)); //add link to select curation domain sampleResource.add(getCurationDomainLink(sampleResource.getLink(Link.REL_SELF))); //add link to curationLinks on this sample sampleResource.add(getCurationLinksLink(sample.getAccession())); sampleResource.add(getCurationLinkLink(sample.getAccession())); return sampleResource; } @Override public Resource<Sample> toResource(Sample sample) { return toResource(sample, Optional.empty(), Optional.empty()); } }
webapps/core/src/main/java/uk/ac/ebi/biosamples/service/SampleResourceAssembler.java
package uk.ac.ebi.biosamples.service;

import java.util.List;
import java.util.Optional;

import org.springframework.hateoas.Link;
import org.springframework.hateoas.Resource;
import org.springframework.hateoas.ResourceAssembler;
import org.springframework.hateoas.mvc.ControllerLinkBuilder;
import org.springframework.stereotype.Service;
import org.springframework.web.util.UriComponents;
import org.springframework.web.util.UriComponentsBuilder;

import uk.ac.ebi.biosamples.controller.SampleCurationLinksRestController;
import uk.ac.ebi.biosamples.controller.SampleRestController;
import uk.ac.ebi.biosamples.model.Sample;

/**
 * This class is used by Spring to add HAL _links for {@link Sample} objects.
 *
 * @author faulcon
 */
@Service
public class SampleResourceAssembler implements ResourceAssembler<Sample, Resource<Sample>> {

	// Link relation names. NOTE(review): the literals "curationLinks" and
	// "curationLink" are repeated below instead of using these constants.
	public static final String REL_CURATIONDOMAIN = "curationDomain";
	public static final String REL_CURATIONLINKS = "curationLinks";
	public static final String REL_CURATIONLINK = "curationLink";

	public SampleResourceAssembler() {
	}

	// Builds the self link, optionally carrying legacydetails / curationdomain
	// query parameters from the original request.
	private Link getSelfLink(String accession, Optional<Boolean> legacydetails, Optional<List<String>> curationDomains) {
		UriComponentsBuilder uriComponentsBuilder = ControllerLinkBuilder.linkTo(SampleRestController.class, accession)
				.toUriComponentsBuilder();
		if (legacydetails.isPresent() && legacydetails.get()) {
			// NOTE(review): this passes the Optional object itself, which likely
			// renders as "legacydetails=Optional[true]" in the URI — presumably
			// legacydetails.get() was intended; confirm against generated links.
			uriComponentsBuilder.queryParam("legacydetails", legacydetails);
		}
		if (curationDomains != null && curationDomains.isPresent()) {
			if (curationDomains.get().size() == 0) {
				// Empty domain list is represented as a value-less parameter.
				uriComponentsBuilder.queryParam("curationdomain", (Object[]) null);
			} else {
				for (String curationDomain : curationDomains.get()) {
					uriComponentsBuilder.queryParam("curationdomain", curationDomain);
				}
			}
		}
		return new Link(uriComponentsBuilder.build().toUriString(), Link.REL_SELF);
	}

	// Adds a templated curationdomain parameter to the self link, using '?' or
	// '&' depending on whether the self link already carries query parameters.
	private Link getCurationDomainLink(Link selfLink) {
		UriComponents selfUriComponents = UriComponentsBuilder.fromUriString(selfLink.getHref()).build();
		if (selfUriComponents.getQueryParams().size() == 0) {
			return new Link(selfLink.getHref() + "{?curationdomain}", REL_CURATIONDOMAIN);
		} else {
			return new Link(selfLink.getHref() + "{&curationdomain}", REL_CURATIONDOMAIN);
		}
	}

	// Link to the paged curation-links resource for this sample.
	private Link getCurationLinksLink(String accession) {
		return ControllerLinkBuilder.linkTo(
				ControllerLinkBuilder.methodOn(SampleCurationLinksRestController.class)
						.getCurationLinkPageJson(accession, null, null)).withRel("curationLinks");
	}

	// Wraps a sample in a HAL resource with self, curationDomain and
	// curationLinks links.
	public Resource<Sample> toResource(Sample sample, Optional<Boolean> legacydetails, Optional<List<String>> curationDomains) {
		Resource<Sample> sampleResource = new Resource<>(sample);
		sampleResource.add(getSelfLink(sample.getAccession(), legacydetails, curationDomains));
		//add link to select curation domain
		sampleResource.add(getCurationDomainLink(sampleResource.getLink(Link.REL_SELF)));
		//add link to curationLinks on this sample
		sampleResource.add(getCurationLinksLink(sample.getAccession()));
		return sampleResource;
	}

	// NOTE(review): this no-arg variant duplicates the link-building logic above
	// (and emits no curationDomain link) instead of delegating to the
	// three-argument toResource — the two paths can drift out of sync.
	@Override
	public Resource<Sample> toResource(Sample sample) {
		Resource<Sample> resource = new Resource<>(sample);

		resource.add(ControllerLinkBuilder.linkTo(SampleRestController.class, sample.getAccession()).withSelfRel());

		resource.add(ControllerLinkBuilder.linkTo(
				ControllerLinkBuilder.methodOn(SampleCurationLinksRestController.class)
						.getCurationLinkPageJson(sample.getAccession(), null, null)).withRel("curationLinks"));
		resource.add(ControllerLinkBuilder.linkTo(
				ControllerLinkBuilder.methodOn(SampleCurationLinksRestController.class)
						.getCurationLinkJson(sample.getAccession(), null)).withRel("curationLink"));

		return resource;
	}
}
fix curationLink rel generation
webapps/core/src/main/java/uk/ac/ebi/biosamples/service/SampleResourceAssembler.java
fix curationLink rel generation
<ide><path>ebapps/core/src/main/java/uk/ac/ebi/biosamples/service/SampleResourceAssembler.java <ide> ControllerLinkBuilder.methodOn(SampleCurationLinksRestController.class) <ide> .getCurationLinkPageJson(accession, null, null)).withRel("curationLinks"); <ide> } <add> <add> <add> private Link getCurationLinkLink(String accession) { <add> return ControllerLinkBuilder.linkTo( <add> ControllerLinkBuilder.methodOn(SampleCurationLinksRestController.class) <add> .getCurationLinkJson(accession, null)).withRel("curationLink"); <add> } <ide> <ide> public Resource<Sample> toResource(Sample sample, Optional<Boolean> legacydetails, Optional<List<String>> curationDomains) { <ide> Resource<Sample> sampleResource = new Resource<>(sample); <ide> sampleResource.add(getCurationDomainLink(sampleResource.getLink(Link.REL_SELF))); <ide> //add link to curationLinks on this sample <ide> sampleResource.add(getCurationLinksLink(sample.getAccession())); <add> sampleResource.add(getCurationLinkLink(sample.getAccession())); <ide> return sampleResource; <ide> } <ide> <ide> @Override <ide> public Resource<Sample> toResource(Sample sample) { <del> Resource<Sample> resource = new Resource<>(sample); <del> <del> resource.add(ControllerLinkBuilder.linkTo(SampleRestController.class, sample.getAccession()).withSelfRel()); <del> <del> resource.add(ControllerLinkBuilder.linkTo( <del> ControllerLinkBuilder.methodOn(SampleCurationLinksRestController.class) <del> .getCurationLinkPageJson(sample.getAccession(), null, null)).withRel("curationLinks")); <del> resource.add(ControllerLinkBuilder.linkTo( <del> ControllerLinkBuilder.methodOn(SampleCurationLinksRestController.class) <del> .getCurationLinkJson(sample.getAccession(), null)).withRel("curationLink")); <del> <del> return resource; <add> return toResource(sample, Optional.empty(), Optional.empty()); <ide> } <del> <ide> <ide> }
Java
epl-1.0
error: pathspec 'src/main/java/org/jtrfp/trcl/img/ColorUtils.java' did not match any file(s) known to git
701301286fedd9931f3196b9d0b819e69f45c050
1
jtrfp/terminal-recall,jtrfp/terminal-recall,jtrfp/terminal-recall
/******************************************************************************* * This file is part of TERMINAL RECALL * Copyright (c) 2012-2015 Chuck Ritola and contributors. * See Github project's commit log for contribution details. * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Public License v3.0 * which accompanies this distribution, and is available at * http://www.gnu.org/licenses/gpl.html * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the COPYING and CREDITS files for more details. * ******************************************************************************/ package org.jtrfp.trcl.img; import java.awt.Color; import org.jtrfp.trcl.math.Misc; public class ColorUtils { public static Color mul(Color l, Color r){ return new Color( (int)Misc.clamp((l.getRed()*r.getRed())/255, 0, 255), (int)Misc.clamp((l.getGreen()*r.getGreen())/255, 0, 255), (int)Misc.clamp((l.getBlue()*r.getBlue())/255, 0, 255), (int)Misc.clamp((l.getAlpha()*r.getAlpha())/255, 0, 255)); }//end mul() }//end ColorUtils
src/main/java/org/jtrfp/trcl/img/ColorUtils.java
Initial creation of ColorUtils.
src/main/java/org/jtrfp/trcl/img/ColorUtils.java
Initial creation of ColorUtils.
<ide><path>rc/main/java/org/jtrfp/trcl/img/ColorUtils.java <add>/******************************************************************************* <add> * This file is part of TERMINAL RECALL <add> * Copyright (c) 2012-2015 Chuck Ritola and contributors. <add> * See Github project's commit log for contribution details. <add> * All rights reserved. This program and the accompanying materials <add> * are made available under the terms of the GNU Public License v3.0 <add> * which accompanies this distribution, and is available at <add> * http://www.gnu.org/licenses/gpl.html <add> * This program is distributed in the hope that it will be useful, <add> * but WITHOUT ANY WARRANTY; without even the implied warranty <add> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. <add> * See the COPYING and CREDITS files for more details. <add> * <add> ******************************************************************************/ <add> <add>package org.jtrfp.trcl.img; <add> <add>import java.awt.Color; <add> <add>import org.jtrfp.trcl.math.Misc; <add> <add>public class ColorUtils { <add> public static Color mul(Color l, Color r){ <add> return new Color( <add> (int)Misc.clamp((l.getRed()*r.getRed())/255, 0, 255), <add> (int)Misc.clamp((l.getGreen()*r.getGreen())/255, 0, 255), <add> (int)Misc.clamp((l.getBlue()*r.getBlue())/255, 0, 255), <add> (int)Misc.clamp((l.getAlpha()*r.getAlpha())/255, 0, 255)); <add> }//end mul() <add>}//end ColorUtils
Java
mit
f33937eb07c693d23503ff567535a2596e4a77b6
0
OreCruncher/DynamicSurroundings,OreCruncher/DynamicSurroundings
/* * This file is part of Dynamic Surroundings, licensed under the MIT License (MIT). * * Copyright (c) OreCruncher * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.blockartistry.DynSurround.client.fx.particle; import javax.annotation.Nonnull; import org.blockartistry.lib.random.XorShiftRandom; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.FontRenderer; import net.minecraft.client.particle.Particle; import net.minecraft.client.renderer.entity.RenderManager; import net.minecraft.client.settings.GameSettings; import net.minecraft.util.ResourceLocation; import net.minecraft.world.World; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; @SideOnly(Side.CLIENT) public abstract class ParticleBase extends Particle { protected final RenderManager manager = Minecraft.getMinecraft().getRenderManager(); protected final FontRenderer font = Minecraft.getMinecraft().fontRenderer; protected ParticleBase(@Nonnull final World worldIn, final double posXIn, final double posYIn, final double posZIn) { super(worldIn, posXIn, posYIn, posZIn); this.rand = XorShiftRandom.current(); } public ParticleBase(@Nonnull final World worldIn, final double xCoordIn, final double yCoordIn, final double zCoordIn, final double xSpeedIn, final double ySpeedIn, final double zSpeedIn) { super(worldIn, xCoordIn, yCoordIn, zCoordIn, xSpeedIn, ySpeedIn, zSpeedIn); this.rand = XorShiftRandom.current(); } protected double interpX() { return this.manager.viewerPosX; } protected double interpY() { return this.manager.viewerPosY; } protected double interpZ() { return this.manager.viewerPosZ; } protected void bindTexture(@Nonnull final ResourceLocation resource) { Minecraft.getMinecraft().getTextureManager().bindTexture(resource); } protected boolean isThirdPersonView() { final GameSettings settings = this.manager.options; return settings == null ? false : settings.thirdPersonView == 2; } }
src/main/java/org/blockartistry/DynSurround/client/fx/particle/ParticleBase.java
/* * This file is part of Dynamic Surroundings, licensed under the MIT License (MIT). * * Copyright (c) OreCruncher * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.blockartistry.DynSurround.client.fx.particle; import javax.annotation.Nonnull; import org.blockartistry.lib.random.XorShiftRandom; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.FontRenderer; import net.minecraft.client.particle.Particle; import net.minecraft.client.renderer.entity.RenderManager; import net.minecraft.util.ResourceLocation; import net.minecraft.world.World; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; @SideOnly(Side.CLIENT) public abstract class ParticleBase extends Particle { protected final RenderManager manager = Minecraft.getMinecraft().getRenderManager(); protected final FontRenderer font = Minecraft.getMinecraft().fontRenderer; protected ParticleBase(@Nonnull final World worldIn, final double posXIn, final double posYIn, final double posZIn) { super(worldIn, posXIn, posYIn, posZIn); this.rand = XorShiftRandom.current(); } public ParticleBase(@Nonnull final World worldIn, final double xCoordIn, final double yCoordIn, final double zCoordIn, final double xSpeedIn, final double ySpeedIn, final double zSpeedIn) { super(worldIn, xCoordIn, yCoordIn, zCoordIn, xSpeedIn, ySpeedIn, zSpeedIn); this.rand = XorShiftRandom.current(); } protected double interpX() { return this.manager.viewerPosX; } protected double interpY() { return this.manager.viewerPosY; } protected double interpZ() { return this.manager.viewerPosZ; } protected void bindTexture(@Nonnull final ResourceLocation resource) { Minecraft.getMinecraft().getTextureManager().bindTexture(resource); } protected boolean isThirdPersonView() { return this.manager.options.thirdPersonView == 2; } }
Race condition when loading laggy world
src/main/java/org/blockartistry/DynSurround/client/fx/particle/ParticleBase.java
Race condition when loading laggy world
<ide><path>rc/main/java/org/blockartistry/DynSurround/client/fx/particle/ParticleBase.java <ide> import net.minecraft.client.gui.FontRenderer; <ide> import net.minecraft.client.particle.Particle; <ide> import net.minecraft.client.renderer.entity.RenderManager; <add>import net.minecraft.client.settings.GameSettings; <ide> import net.minecraft.util.ResourceLocation; <ide> import net.minecraft.world.World; <ide> import net.minecraftforge.fml.relauncher.Side; <ide> } <ide> <ide> protected boolean isThirdPersonView() { <del> return this.manager.options.thirdPersonView == 2; <add> final GameSettings settings = this.manager.options; <add> return settings == null ? false : settings.thirdPersonView == 2; <ide> } <ide> }
JavaScript
apache-2.0
6966bd54eb199ba139a61cae9f27ac893bf7b960
0
dakingdog/AlexaMyChef
// Require your files or libraries here. You can use npm to install libraries. var Alexa = require('clay-alexa-sdk'); var USDAkey="Q2W8cDINhmomMkw2Qv91Vq3laaACY2NB8J54WsdI"; var https=require('https'); var optionsget={ host:'api.nal.usda.gov', api_key: USDAkey, format: 'json', port: 443, fg: 'fruit and fruit juices', max: 30, path: '/ndb/search/', method: 'GET' }; // do the GET request var reqGet = https.request(optionsget, function(res) { console.log("statusCode: ", res.statusCode); // uncomment it for header details // console.log("headers: ", res.headers); var fruit=[]; res.on('data', function(d) { const usdaFruit = JSON.parse(d); var length=usdaFruit.list.total; for (var x=0; x<length; x++){ fruit.push(usdaFruit.list.item[x].name); } }); }); // Array of possible Awesome things that Alexa can respond with. const awesomeSayings = [ "You are a force of nature.", "You are an inspiration to everyone that meets you.", "You are my Arnold", "You are incredible!", "Bill Gates wanted to know if you have any tips for him?", "how are you so fucking good at what you do?", "Boom-shacklaka You're on Fire", "I marvel at your accomplishments on an hourly basis", "Who is the most awesome person today? You. You are.", "I'm obsessed with you", "When you code, it's like you don't write code, you paint it. It's incredible.", "Everything is awesome. Everything is cool when you're part of a team" ] const meal = ["Try an apple walnut spinach salad sometime. 
It's a delicious, healthy meal loaded with polyunsaturated fats and plenty of vitamins.", "How about an omelette with chicken, onion, tomatoes, and peppers fried in olive oil?", "For a healthy breakfast, try having some fruit of your choice, milk, and some whole grain toast.", "No, Everything is a Meme."]; var breakfast = ["How about eggs fried in olive oil with french toast and a glass of low fat milk?", "Try a bowl of hot oatmeal with almonds, butter, and a bit of honey", "For a quick breakfast, fruits such as apples and bananas with trail mix is an excellent choice.", "Nut Butter, Banana, and Chia Seed Toast", "Berry and Yogurt Smoothie", "Savory Oatmeal With an Egg", "Quinoa Fruit Salad", "Tomato Toast With Macadamia Ricotta", "Quinoa and Chia Porridge, Almonds, and Sliced Peaches", "Avocado Toast With Egg, sprinkled with salt and pepper", "Everything is a meme", "Chocolate Quinoa Breakfast Bowl", "Overnight Crock-Pot Egg Casserole", "Warm Fruit Bowl made with cherries, raspberries, blueberries, sprinkled with dark chocolate and doused in low fat milk."]; var dessert = ["Get off your lazy ass and make yourself your own food", "MEMED", "Dairy-free Peanut Butter Chocolate Brittle Cake with almond frosting and glaze."]; exports.handler = function(event, context, callback) { // Write your Skill handler code here. This is where you // specify how your skill should respond. Make sure to write // a handler for each of your Intent types. 
console.log(event); var handlers = { 'GetMeal': function() { var selectedMeal = String(this.event.request.intent.slots.meal.value); var randomSaying; if (selectedMeal === "Meal"|| selectedMeal === "meal") { const randomSayingIndexMeal = Math.floor(Math.random() * meal.length); randomSaying = meal[randomSayingIndexMeal]; } if (selectedMeal === "Breakfast" || selectedMeal === "breakfast") { const randomSayingIndexBF = Math.floor(Math.random() * breakfast.length); randomSaying = breakfast[randomSayingIndexBF]; } if (selectedMeal === "Lunch" || selectedMeal === "lunch") { const randomSayingIndexL = Math.floor(Math.random() * meal.length); randomSaying = meal[randomSayingIndexL]; } if (selectedMeal === "Dinner" || selectedMeal === "dinner") { const randomSayingIndexD = Math.floor(Math.random() * meal.length); randomSaying = meal[randomSayingIndexD]; } if (selectedMeal === "Dessert" || selectedMeal === "dessert") { const randomSayingIndexDT = Math.floor(Math.random() * dessert.length); randomSaying = dessert[randomSayingIndexDT]; } // Choose a random saying from the awesomeSayings array. // Tell Alexa to speak that saying. this.emit(':tell', randomSaying); }, // Intent: GetAwesomeSaying returns a random saying from the // array of possible sayings awesomeSayings 'GetFruit': function() { // Choose a random saying from the awesomeSayings array. const randomSayingIndex = Math.floor(Math.random() * fruit.length); const randomSaying = fruit[randomSayingIndex]; // Tell Alexa to speak that saying. this.emit(':tell', randomSaying); }, // Intent: GetAwesomeSaying returns a random saying from the // array of possible sayings awesomeSayings 'GetAwesomeNumber': function() { // Choose a random number between 1-100 const randomNumber = Math.floor(Math.random() * 100); // Tell Alexa to speak that number this.emit(':tell', "The best number is " + randomNumber); }, // Intent: Launch. This is how Awesome Bot responds when there // aren't important. 
Important to have a Launch Intent to make sure // your skill passes Publishing Certification proces. 'LaunchRequest': function() { this.emit(':tell', "Hello. I'm Awesome Bot. You are awesome! Anytime you need a little boost just say Alexa ask awesome bot for an awesome saying!"); }, // Intent: Unhandled. The Unhandled intent is how Alexa responds when someone // asks for something that we don't handle explicitly 'Unhandled': function() { this.emit(':tell', "I'm not sure what you're asking for"); } }; // The event.body is the request object that has come in via HTTPS. // We need to JSON parse the body which holds the object since it's // been sent over HTTP. That object has information about what intent was passed. // You can see a sample of what the event body looks like in test-data.json var alexa = Alexa.handler(JSON.parse(event.body), context); // The register handler function sets up the handlers you specified above, // and then alexa.execute() calls the appropriate handler based on the Intent // passed in the Alexa request object (which here is in the event.body) alexa.registerHandlers(handlers); alexa.execute(); }
my-chef3/index.js
// Require your files or libraries here. You can use npm to install libraries. var Alexa = require('clay-alexa-sdk'); // var USDAkey="Q2W8cDINhmomMkw2Qv91Vq3laaACY2NB8J54WsdI"; // var https=require('https'); // var optionsget={ // host:'api.nal.usda.gov', // api_key: USDAkey, // format: 'json', // port: 443, // fg: 'fruit and fruit juices', // max: 30, // path: '/ndb/search/', // method: 'GET' // }; // // do the GET request // var reqGet = https.request(optionsget, function(res) { // console.log("statusCode: ", res.statusCode); // // uncomment it for header details // // console.log("headers: ", res.headers); // res.on('data', function(d) { // const usdaFruit = JSON.parse(d); // var length=usdaFruit.list.total; // for (var x=0; x<length; x++){ // fruit[x] = usdaFruit.list.item[x].name; // } // }); // }); // Array of possible Awesome things that Alexa can respond with. const awesomeSayings = [ "You are a force of nature.", "You are an inspiration to everyone that meets you.", "You are my Arnold", "You are incredible!", "Bill Gates wanted to know if you have any tips for him?", "how are you so fucking good at what you do?", "Boom-shacklaka You're on Fire", "I marvel at your accomplishments on an hourly basis", "Who is the most awesome person today? You. You are.", "I'm obsessed with you", "When you code, it's like you don't write code, you paint it. It's incredible.", "Everything is awesome. Everything is cool when you're part of a team" ] const meal = ["Try an apple walnut spinach salad sometime. 
It's a delicious, healthy meal loaded with polyunsaturated fats and plenty of vitamins.", "How about an omelette with chicken, onion, tomatoes, and peppers fried in olive oil?", "For a healthy breakfast, try having some fruit of your choice, milk, and some whole grain toast.", "No, Everything is a Meme."]; var breakfast = ["How about eggs fried in olive oil with french toast and a glass of low fat milk?", "Try a bowl of hot oatmeal with almonds, butter, and a bit of honey", "For a quick breakfast, fruits such as apples and bananas with trail mix is an excellent choice.", "Nut Butter, Banana, and Chia Seed Toast", "Berry and Yogurt Smoothie", "Savory Oatmeal With an Egg", "Quinoa Fruit Salad", "Tomato Toast With Macadamia Ricotta", "Quinoa and Chia Porridge, Almonds, and Sliced Peaches", "Avocado Toast With Egg, sprinkled with salt and pepper", "Everything is a meme", "Chocolate Quinoa Breakfast Bowl", "Overnight Crock-Pot Egg Casserole", "Warm Fruit Bowl made with cherries, raspberries, blueberries, sprinkled with dark chocolate and doused in low fat milk."]; var dessert = ["Get off your lazy ass and make yourself your own food"]; exports.handler = function(event, context, callback) { // Write your Skill handler code here. This is where you // specify how your skill should respond. Make sure to write // a handler for each of your Intent types. 
console.log(event); var handlers = { 'GetMeal': function() { var selectedMeal = String(this.event.request.intent.slots.meal.value); var randomSaying; if (selectedMeal === "Meal"|| selectedMeal === "meal") { const randomSayingIndexMeal = Math.floor(Math.random() * meal.length); randomSaying = meal[randomSayingIndexMeal]; } if (selectedMeal === "Breakfast" || selectedMeal === "breakfast") { const randomSayingIndexBF = Math.floor(Math.random() * breakfast.length); randomSaying = breakfast[randomSayingIndexBF]; } if (selectedMeal === "Lunch" || selectedMeal === "lunch") { const randomSayingIndexL = Math.floor(Math.random() * meal.length); randomSaying = meal[randomSayingIndexL]; } if (selectedMeal === "Dinner" || selectedMeal === "dinner") { const randomSayingIndexD = Math.floor(Math.random() * meal.length); randomSaying = meal[randomSayingIndexD]; } if (selectedMeal === "Dessert" || selectedMeal === "dessert") { const randomSayingIndexDT = Math.floor(Math.random() * desert.length); randomSaying = meal[randomSayingIndexDT]; } // Choose a random saying from the awesomeSayings array. // Tell Alexa to speak that saying. this.emit(':tell', randomSaying); }, // Intent: GetAwesomeSaying returns a random saying from the // array of possible sayings awesomeSayings 'GetAwesomeSaying': function() { // Choose a random saying from the awesomeSayings array. const randomSayingIndex = Math.floor(Math.random() * awesomeSayings.length); const randomSaying = awesomeSayings[randomSayingIndex]; // Tell Alexa to speak that saying. this.emit(':tell', randomSaying); }, // Intent: GetAwesomeSaying returns a random saying from the // array of possible sayings awesomeSayings 'GetAwesomeNumber': function() { // Choose a random number between 1-100 const randomNumber = Math.floor(Math.random() * 100); // Tell Alexa to speak that number this.emit(':tell', "The best number is " + randomNumber); }, // Intent: Launch. This is how Awesome Bot responds when there // aren't important. 
Important to have a Launch Intent to make sure // your skill passes Publishing Certification proces. 'LaunchRequest': function() { this.emit(':tell', "Hello. I'm Awesome Bot. You are awesome! Anytime you need a little boost just say Alexa ask awesome bot for an awesome saying!"); }, // Intent: Unhandled. The Unhandled intent is how Alexa responds when someone // asks for something that we don't handle explicitly 'Unhandled': function() { this.emit(':tell', "I'm not sure what you're asking for"); } }; // The event.body is the request object that has come in via HTTPS. // We need to JSON parse the body which holds the object since it's // been sent over HTTP. That object has information about what intent was passed. // You can see a sample of what the event body looks like in test-data.json var alexa = Alexa.handler(JSON.parse(event.body), context); // The register handler function sets up the handlers you specified above, // and then alexa.execute() calls the appropriate handler based on the Intent // passed in the Alexa request object (which here is in the event.body) alexa.registerHandlers(handlers); alexa.execute(); }
Testing fruit USDA db call
my-chef3/index.js
Testing fruit USDA db call
<ide><path>y-chef3/index.js <ide> // Require your files or libraries here. You can use npm to install libraries. <ide> var Alexa = require('clay-alexa-sdk'); <del>// var USDAkey="Q2W8cDINhmomMkw2Qv91Vq3laaACY2NB8J54WsdI"; <del>// var https=require('https'); <del>// var optionsget={ <del>// host:'api.nal.usda.gov', <del>// api_key: USDAkey, <del>// format: 'json', <del>// port: 443, <del>// fg: 'fruit and fruit juices', <del>// max: 30, <del>// path: '/ndb/search/', <del>// method: 'GET' <del>// }; <add>var USDAkey="Q2W8cDINhmomMkw2Qv91Vq3laaACY2NB8J54WsdI"; <add>var https=require('https'); <add>var optionsget={ <add> host:'api.nal.usda.gov', <add> api_key: USDAkey, <add> format: 'json', <add> port: 443, <add> fg: 'fruit and fruit juices', <add> max: 30, <add> path: '/ndb/search/', <add> method: 'GET' <add>}; <ide> <del>// // do the GET request <del>// var reqGet = https.request(optionsget, function(res) { <del>// console.log("statusCode: ", res.statusCode); <del>// // uncomment it for header details <del>// // console.log("headers: ", res.headers); <add>// do the GET request <add>var reqGet = https.request(optionsget, function(res) { <add> console.log("statusCode: ", res.statusCode); <add> // uncomment it for header details <add>// console.log("headers: ", res.headers); <add> var fruit=[]; <add> res.on('data', function(d) { <add> const usdaFruit = JSON.parse(d); <add> var length=usdaFruit.list.total; <add> for (var x=0; x<length; x++){ <add> fruit.push(usdaFruit.list.item[x].name); <add> } <add> }); <ide> <del>// res.on('data', function(d) { <del>// const usdaFruit = JSON.parse(d); <del>// var length=usdaFruit.list.total; <del>// for (var x=0; x<length; x++){ <del>// fruit[x] = usdaFruit.list.item[x].name; <del>// } <del>// }); <del> <del>// }); <add>}); <ide> // Array of possible Awesome things that Alexa can respond with. 
<ide> const awesomeSayings = [ <ide> "You are a force of nature.", <ide> "Chocolate Quinoa Breakfast Bowl", <ide> "Overnight Crock-Pot Egg Casserole", <ide> "Warm Fruit Bowl made with cherries, raspberries, blueberries, sprinkled with dark chocolate and doused in low fat milk."]; <del>var dessert = ["Get off your lazy ass and make yourself your own food"]; <add>var dessert = ["Get off your lazy ass and make yourself your own food", <add> "MEMED", <add> "Dairy-free Peanut Butter Chocolate Brittle Cake with almond frosting and glaze."]; <ide> exports.handler = function(event, context, callback) { <ide> <ide> // Write your Skill handler code here. This is where you <ide> randomSaying = meal[randomSayingIndexD]; <ide> } <ide> if (selectedMeal === "Dessert" || selectedMeal === "dessert") { <del> const randomSayingIndexDT = Math.floor(Math.random() * desert.length); <del> randomSaying = meal[randomSayingIndexDT]; <add> const randomSayingIndexDT = Math.floor(Math.random() * dessert.length); <add> randomSaying = dessert[randomSayingIndexDT]; <ide> } <ide> // Choose a random saying from the awesomeSayings array. <ide> <ide> }, <ide> // Intent: GetAwesomeSaying returns a random saying from the <ide> // array of possible sayings awesomeSayings <del> 'GetAwesomeSaying': function() { <add> 'GetFruit': function() { <ide> <ide> // Choose a random saying from the awesomeSayings array. <del> const randomSayingIndex = Math.floor(Math.random() * awesomeSayings.length); <del> const randomSaying = awesomeSayings[randomSayingIndex]; <add> const randomSayingIndex = Math.floor(Math.random() * fruit.length); <add> const randomSaying = fruit[randomSayingIndex]; <ide> <ide> // Tell Alexa to speak that saying. <ide> this.emit(':tell', randomSaying);
Java
apache-2.0
11f288bf9d32704f3cd0d1e2dad0810cb77979ae
0
tharikaGitHub/product-apim,chamilaadhi/product-apim,ChamNDeSilva/product-apim,amalkasubasinghe/product-apim,ChamNDeSilva/product-apim,dhanuka84/product-apim,jaadds/product-apim,sambaheerathan/product-apim,hevayo/product-apim,sambaheerathan/product-apim,lakmali/product-apim,thilinicooray/product-apim,dhanuka84/product-apim,pradeepmurugesan/product-apim,dhanuka84/product-apim,chamilaadhi/product-apim,wso2/product-apim,hevayo/product-apim,jaadds/product-apim,dhanuka84/product-apim,thilinicooray/product-apim,jaadds/product-apim,dewmini/product-apim,rswijesena/product-apim,pradeepmurugesan/product-apim,dewmini/product-apim,nu1silva/product-apim,dewmini/product-apim,thilinicooray/product-apim,irhamiqbal/product-apim,wso2/product-apim,jaadds/product-apim,amalkasubasinghe/product-apim,tharindu1st/product-apim,wso2/product-apim,wso2/product-apim,rswijesena/product-apim,pradeepmurugesan/product-apim,tharikaGitHub/product-apim,irhamiqbal/product-apim,chamilaadhi/product-apim,chamilaadhi/product-apim,abimarank/product-apim,tharikaGitHub/product-apim,amalkasubasinghe/product-apim,amalkasubasinghe/product-apim,wso2/product-apim,tharikaGitHub/product-apim,dewmini/product-apim,hevayo/product-apim,dewmini/product-apim,nu1silva/product-apim,nu1silva/product-apim,nu1silva/product-apim,lakmali/product-apim,pradeepmurugesan/product-apim,nu1silva/product-apim,irhamiqbal/product-apim,irhamiqbal/product-apim,chamilaadhi/product-apim,abimarank/product-apim,tharikaGitHub/product-apim,hevayo/product-apim,thilinicooray/product-apim
/* *Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * *WSO2 Inc. licenses this file to you under the Apache License, *Version 2.0 (the "License"); you may not use this file except *in compliance with the License. *You may obtain a copy of the License at * *http://www.apache.org/licenses/LICENSE-2.0 * *Unless required by applicable law or agreed to in writing, *software distributed under the License is distributed on an *"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *KIND, either express or implied. See the License for the *specific language governing permissions and limitations *under the License. */ package org.wso2.am.integration.tests.throttling; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.testng.Assert; import org.testng.annotations.BeforeClass; import org.testng.annotations.DataProvider; import org.testng.annotations.Factory; import org.testng.annotations.Test; import org.wso2.am.integration.test.utils.base.APIMIntegrationBaseTest; import org.wso2.am.integration.test.utils.bean.APIThrottlingTierRequest; import org.wso2.am.integration.test.utils.clients.AdminDashboardRestClient; import org.wso2.carbon.automation.engine.annotations.ExecutionEnvironment; import org.wso2.carbon.automation.engine.annotations.SetEnvironment; import org.wso2.carbon.automation.engine.context.TestUserMode; import org.wso2.carbon.automation.test.utils.http.client.HttpResponse; @SetEnvironment(executionEnvironments = {ExecutionEnvironment.STANDALONE}) public class APITierManagementTestCase extends APIMIntegrationBaseTest { private final Log log = LogFactory.getLog(APITierManagementTestCase.class); private AdminDashboardRestClient adminDashboard; private String tierName = "SampleTier"; @Factory(dataProvider = "userModeDataProvider") public APITierManagementTestCase(TestUserMode userMode) { this.userMode = userMode; } @DataProvider public static Object[][] userModeDataProvider() { return new Object[][]{ 
new Object[]{TestUserMode.SUPER_TENANT_ADMIN}, new Object[]{TestUserMode.TENANT_ADMIN}, }; } @BeforeClass(alwaysRun = true) public void setEnvironment() throws Exception { super.init(userMode); String storeURLHttp = getStoreURLHttp(); adminDashboard = new AdminDashboardRestClient(storeURLHttp); adminDashboard.login(user.getUserName(), user.getPassword()); } @Test(groups = {"wso2.am"}, description = "Add tier through admin-dashboard") public void addTierTestCase() throws Exception { APIThrottlingTierRequest tierRequest = new APIThrottlingTierRequest(tierName, "5", "Sample Tier", "120", "true", "FREE"); tierRequest.setAction("addTier"); HttpResponse addTierResponse = adminDashboard.addTier(tierRequest); verifyResponse(addTierResponse); String getTiersResponse = adminDashboard.getAllTiers().getData(); log.info(getTiersResponse); Assert.assertTrue(getTiersResponse.contains(tierName), "Added tier not found"); } @Test(groups = {"wso2.am"}, description = "Delete tier through admin-dashboard") public void deleteTierTestCase() throws Exception { HttpResponse addTierResponse = adminDashboard.deleteTier(tierName); verifyResponse(addTierResponse); String getTiersResponse = adminDashboard.getAllTiers().getData(); log.info(getTiersResponse); Assert.assertTrue(!getTiersResponse.contentEquals(tierName), "Tier not deleted"); } }
modules/integration/tests-integration/tests-backend/src/test/java/org/wso2/am/integration/tests/throttling/APITierManagementTestCase.java
/* *Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * *WSO2 Inc. licenses this file to you under the Apache License, *Version 2.0 (the "License"); you may not use this file except *in compliance with the License. *You may obtain a copy of the License at * *http://www.apache.org/licenses/LICENSE-2.0 * *Unless required by applicable law or agreed to in writing, *software distributed under the License is distributed on an *"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *KIND, either express or implied. See the License for the *specific language governing permissions and limitations *under the License. */ package org.wso2.am.integration.tests.throttling; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.testng.Assert; import org.testng.annotations.BeforeClass; import org.testng.annotations.DataProvider; import org.testng.annotations.Factory; import org.testng.annotations.Test; import org.wso2.am.integration.test.utils.base.APIMIntegrationBaseTest; import org.wso2.am.integration.test.utils.bean.APIThrottlingTierRequest; import org.wso2.am.integration.test.utils.clients.AdminDashboardRestClient; import org.wso2.carbon.automation.engine.annotations.ExecutionEnvironment; import org.wso2.carbon.automation.engine.annotations.SetEnvironment; import org.wso2.carbon.automation.engine.context.TestUserMode; import org.wso2.carbon.automation.test.utils.http.client.HttpResponse; @SetEnvironment(executionEnvironments = {ExecutionEnvironment.STANDALONE}) public class APITierManagementTestCase extends APIMIntegrationBaseTest { private final Log log = LogFactory.getLog(APITierManagementTestCase.class); private AdminDashboardRestClient adminDashboard; private String tierName = "SmapleTier"; @Factory(dataProvider = "userModeDataProvider") public APITierManagementTestCase(TestUserMode userMode) { this.userMode = userMode; } @DataProvider public static Object[][] userModeDataProvider() { return new Object[][]{ 
new Object[]{TestUserMode.SUPER_TENANT_ADMIN}, new Object[]{TestUserMode.TENANT_ADMIN}, }; } @BeforeClass(alwaysRun = true) public void setEnvironment() throws Exception { super.init(userMode); String storeURLHttp = getStoreURLHttp(); adminDashboard = new AdminDashboardRestClient(storeURLHttp); adminDashboard.login(user.getUserName(), user.getPassword()); } @Test(groups = {"wso2.am"}, description = "Add tier through admin-dashboard") public void addTierTestCase() throws Exception { APIThrottlingTierRequest tierRequest = new APIThrottlingTierRequest(tierName, "5", "Sample Tier", "120", "true", "FREE"); tierRequest.setAction("addTier"); HttpResponse addTierResponse = adminDashboard.addTier(tierRequest); verifyResponse(addTierResponse); String getTiersResponse = adminDashboard.getAllTiers().getData(); log.info(getTiersResponse); Assert.assertTrue(getTiersResponse.contains(tierName), "Added tier not found"); } @Test(groups = {"wso2.am"}, description = "Delete tier through admin-dashboard") public void deleteTierTestCase() throws Exception { HttpResponse addTierResponse = adminDashboard.deleteTier(tierName); verifyResponse(addTierResponse); String getTiersResponse = adminDashboard.getAllTiers().getData(); log.info(getTiersResponse); Assert.assertTrue(!getTiersResponse.contentEquals(tierName), "Tier not deleted"); } }
Fixing typo
modules/integration/tests-integration/tests-backend/src/test/java/org/wso2/am/integration/tests/throttling/APITierManagementTestCase.java
Fixing typo
<ide><path>odules/integration/tests-integration/tests-backend/src/test/java/org/wso2/am/integration/tests/throttling/APITierManagementTestCase.java <ide> private final Log log = LogFactory.getLog(APITierManagementTestCase.class); <ide> <ide> private AdminDashboardRestClient adminDashboard; <del> private String tierName = "SmapleTier"; <add> private String tierName = "SampleTier"; <ide> <ide> @Factory(dataProvider = "userModeDataProvider") <ide> public APITierManagementTestCase(TestUserMode userMode) {
Java
bsd-3-clause
ca6176c8be318088a2be01b581675081652b651b
0
NCIP/cadsr-bulk-loader,NCIP/cadsr-bulk-loader
package gov.nih.nci.ncicb.cadsr.bulkloader.util; import gov.nih.nci.ncicb.cadsr.bulkloader.beans.UnloadProperties; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.dao.DataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.PreparedStatementCallback; import org.springframework.jdbc.core.ResultSetExtractor; public class BulkLoaderUnclassifier { private static Log log = LogFactory.getLog(BulkLoaderUnclassifier.class.getName()); private DataSource dataSource; private String checkCsCsiQry; private String csLatestVersionQry; private String csiLatestVersionQry; private String deleteDefsQry; private String deleteAltNamesQry; private String deleteAttrsQry; private String deleteAdminCompCSCSIQry; private String csName; private Double csVersion; private String csiName; private Double csiVersion; public DataSource getDataSource() { return dataSource; } public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } public String getCheckCsCsiQry() { return checkCsCsiQry; } public void setCheckCsCsiQry(String checkCsCsiQry) { this.checkCsCsiQry = checkCsCsiQry; } public String getCsLatestVersionQry() { return csLatestVersionQry; } public void setCsLatestVersionQry(String csLatestVersionQry) { this.csLatestVersionQry = csLatestVersionQry; } public String getCsiLatestVersionQry() { return csiLatestVersionQry; } public void setCsiLatestVersionQry(String csiLatestVersionQry) { this.csiLatestVersionQry = csiLatestVersionQry; } public String getDeleteDefsQry() { return deleteDefsQry; } public void setDeleteDefsQry(String deleteDefsQry) { this.deleteDefsQry = deleteDefsQry; } public String getDeleteAltNamesQry() { return deleteAltNamesQry; } public void setDeleteAltNamesQry(String deleteAltNamesQry) { this.deleteAltNamesQry = 
deleteAltNamesQry; } public String getDeleteAttrsQry() { return deleteAttrsQry; } public void setDeleteAttrsQry(String deleteAttrsQry) { this.deleteAttrsQry = deleteAttrsQry; } public String getDeleteAdminCompCSCSIQry() { return deleteAdminCompCSCSIQry; } public void setDeleteAdminCompCSCSIQry(String deleteAdminCompCSCSIQry) { this.deleteAdminCompCSCSIQry = deleteAdminCompCSCSIQry; } public synchronized void unclassify(UnloadProperties unloadProperties) { init(unloadProperties); if (checkCSCSI()) { log.info("Unclassifying Definitions..."); deleteDefinitions(); log.info("Unclassifying Alternate Names..."); deleteAlternateNames(); log.info("Unclassifying Attributes..."); deleteAttributes(); log.info("Unclassifying Admin Components..."); deleteAdminComponentCSCSI(); } log.info("Unclassification complete!"); } private void init(UnloadProperties unloadProperties) { this.csName = unloadProperties.getClassificationSchemeName(); this.csiName = unloadProperties.getClassificationSchemeItemName(); this.csVersion = getCSVersion(unloadProperties); this.csiVersion = getCSIVersion(unloadProperties); } private Double getCSVersion(UnloadProperties unloadProperties) { Double csVersion = new Double(unloadProperties.getCsVersion()); if (csVersion.equals(new Double(0.0))) { csVersion = getLatestCSVersion(unloadProperties); } return csVersion; } private Double getLatestCSVersion(UnloadProperties unloadProperties) { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); String csName = unloadProperties.getClassificationSchemeName(); Double latestVersion = (Double)jdbcTemplate.query(csLatestVersionQry, new Object[]{csName}, new ResultSetExtractor() { @Override public Object extractData(ResultSet rs) throws SQLException, DataAccessException { if (rs.next()) { return new Double(rs.getDouble(1)); } return new Double(0.0); } }); return latestVersion; } private Double getCSIVersion(UnloadProperties unloadProperties) { Double csiVersion = new Double(unloadProperties.getCsiVersion()); if 
(csiVersion.equals(new Double(0.0))) { csiVersion = getLatestCSIVersion(unloadProperties); } return csiVersion; } private Double getLatestCSIVersion(UnloadProperties unloadProperties) { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); String csName = unloadProperties.getClassificationSchemeName(); String csiName = unloadProperties.getClassificationSchemeItemName(); Double latestVersion = (Double)jdbcTemplate.query(csiLatestVersionQry, new Object[]{csiName, csName}, new ResultSetExtractor() { @Override public Object extractData(ResultSet rs) throws SQLException, DataAccessException { if (rs.next()) { return new Double(rs.getDouble(1)); } return new Double(0.0); } }); return latestVersion; } private boolean checkCSCSI() { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); Boolean checkCsCsi = (Boolean)jdbcTemplate.query(checkCsCsiQry, new Object[]{csName, csVersion, csiName, csiVersion}, new ResultSetExtractor() { @Override public Object extractData(ResultSet rs) throws SQLException, DataAccessException { if (rs.next()) { return new Boolean(true); } return new Boolean(false); } }); return checkCsCsi; } private void deleteDefinitions() { runDeleteQry(this.deleteDefsQry, new Object[]{csName, csVersion, csiName, csiVersion}); } private void deleteAlternateNames() { runDeleteQry(this.deleteAltNamesQry, new Object[]{csName, csVersion, csiName, csiVersion}); } private void deleteAttributes() { runDeleteQry(this.deleteAttrsQry, new Object[]{csName, csVersion, csiName, csiVersion}); } private void deleteAdminComponentCSCSI() { runDeleteQry(this.deleteAdminCompCSCSIQry, new Object[]{csName, csVersion, csiName, csiVersion}); } private void runDeleteQry(String qry, final Object[] args) { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); jdbcTemplate.execute(qry, new PreparedStatementCallback() { @Override public Object doInPreparedStatement(PreparedStatement ps) throws SQLException, DataAccessException { for (int i=0;i<args.length;i++) { Object o = 
args[i]; if (o instanceof String) { ps.setString(i+1, (String) o); } else if (o instanceof Integer) { ps.setInt(i+1, (Integer) o); } else if (o instanceof Double) { ps.setDouble(i+1, (Double) o); } } return null; } }); } }
cadsrbulkloader/src/main/java/gov/nih/nci/ncicb/cadsr/bulkloader/util/BulkLoaderUnclassifier.java
package gov.nih.nci.ncicb.cadsr.bulkloader.util; import gov.nih.nci.ncicb.cadsr.bulkloader.beans.LoadProperties; import gov.nih.nci.ncicb.cadsr.bulkloader.beans.UnloadProperties; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.dao.DataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.PreparedStatementCallback; import org.springframework.jdbc.core.ResultSetExtractor; public class BulkLoaderUnclassifier { private static Log log = LogFactory.getLog(BulkLoaderUnclassifier.class.getName()); private DataSource dataSource; private String checkCsCsiQry; private String csLatestVersionQry; private String csiLatestVersionQry; private String deleteDefsQry; private String deleteAltNamesQry; private String deleteAttrsQry; private String deleteAdminCompCSCSIQry; private String csName; private Double csVersion; private String csiName; private Double csiVersion; public DataSource getDataSource() { return dataSource; } public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } public String getCheckCsCsiQry() { return checkCsCsiQry; } public void setCheckCsCsiQry(String checkCsCsiQry) { this.checkCsCsiQry = checkCsCsiQry; } public String getCsLatestVersionQry() { return csLatestVersionQry; } public void setCsLatestVersionQry(String csLatestVersionQry) { this.csLatestVersionQry = csLatestVersionQry; } public String getCsiLatestVersionQry() { return csiLatestVersionQry; } public void setCsiLatestVersionQry(String csiLatestVersionQry) { this.csiLatestVersionQry = csiLatestVersionQry; } public String getDeleteDefsQry() { return deleteDefsQry; } public void setDeleteDefsQry(String deleteDefsQry) { this.deleteDefsQry = deleteDefsQry; } public String getDeleteAltNamesQry() { return deleteAltNamesQry; } public void 
setDeleteAltNamesQry(String deleteAltNamesQry) { this.deleteAltNamesQry = deleteAltNamesQry; } public String getDeleteAttrsQry() { return deleteAttrsQry; } public void setDeleteAttrsQry(String deleteAttrsQry) { this.deleteAttrsQry = deleteAttrsQry; } public String getDeleteAdminCompCSCSIQry() { return deleteAdminCompCSCSIQry; } public void setDeleteAdminCompCSCSIQry(String deleteAdminCompCSCSIQry) { this.deleteAdminCompCSCSIQry = deleteAdminCompCSCSIQry; } public synchronized void unclassify(UnloadProperties unloadProperties) { init(unloadProperties); if (checkCSCSI()) { log.info("Unclassifying Definitions..."); deleteDefinitions(); log.info("Unclassifying Alternate Names..."); deleteAlternateNames(); log.info("Unclassifying Attributes..."); deleteAttributes(); log.info("Unclassifying Admin Components..."); deleteAdminComponentCSCSI(); } } private void init(UnloadProperties unloadProperties) { this.csName = unloadProperties.getClassificationSchemeName(); this.csiName = unloadProperties.getClassificationSchemeItemName(); this.csVersion = getCSVersion(unloadProperties); this.csiVersion = getCSIVersion(unloadProperties); } private Double getCSVersion(UnloadProperties unloadProperties) { Double csVersion = new Double(unloadProperties.getCsVersion()); if (csVersion.equals(new Double(0.0))) { csVersion = getLatestCSVersion(unloadProperties); } return csVersion; } private Double getLatestCSVersion(UnloadProperties unloadProperties) { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); String csName = unloadProperties.getClassificationSchemeName(); Double latestVersion = (Double)jdbcTemplate.query(csLatestVersionQry, new Object[]{csName}, new ResultSetExtractor() { @Override public Object extractData(ResultSet rs) throws SQLException, DataAccessException { if (rs.next()) { return new Double(rs.getDouble(1)); } return new Double(0.0); } }); return latestVersion; } private Double getCSIVersion(UnloadProperties unloadProperties) { Double csiVersion = new 
Double(unloadProperties.getCsiVersion()); if (csiVersion.equals(new Double(0.0))) { csiVersion = getLatestCSIVersion(unloadProperties); } return csiVersion; } private Double getLatestCSIVersion(UnloadProperties unloadProperties) { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); String csName = unloadProperties.getClassificationSchemeName(); String csiName = unloadProperties.getClassificationSchemeItemName(); Double latestVersion = (Double)jdbcTemplate.query(csiLatestVersionQry, new Object[]{csiName, csName}, new ResultSetExtractor() { @Override public Object extractData(ResultSet rs) throws SQLException, DataAccessException { if (rs.next()) { return new Double(rs.getDouble(1)); } return new Double(0.0); } }); return latestVersion; } private boolean checkCSCSI() { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); Boolean checkCsCsi = (Boolean)jdbcTemplate.query(checkCsCsiQry, new Object[]{csName, csVersion, csiName, csiVersion}, new ResultSetExtractor() { @Override public Object extractData(ResultSet rs) throws SQLException, DataAccessException { if (rs.next()) { return new Boolean(true); } return new Boolean(false); } }); return checkCsCsi; } private void deleteDefinitions() { runDeleteQry(this.deleteDefsQry, new Object[]{csName, csVersion, csiName, csiVersion}); } private void deleteAlternateNames() { runDeleteQry(this.deleteAltNamesQry, new Object[]{csName, csVersion, csiName, csiVersion}); } private void deleteAttributes() { runDeleteQry(this.deleteAttrsQry, new Object[]{csName, csVersion, csiName, csiVersion}); } private void deleteAdminComponentCSCSI() { runDeleteQry(this.deleteAdminCompCSCSIQry, new Object[]{csName, csVersion, csiName, csiVersion}); } private void runDeleteQry(String qry, final Object[] args) { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); jdbcTemplate.execute(qry, new PreparedStatementCallback() { @Override public Object doInPreparedStatement(PreparedStatement ps) throws SQLException, DataAccessException { for 
(int i=0;i<args.length;i++) { Object o = args[i]; if (o instanceof String) { ps.setString(i, (String) o); } else if (o instanceof Integer) { ps.setInt(i, (Integer) o); } else if (o instanceof Double) { ps.setDouble(i, (Double) o); } } return null; } }); } }
bug fix for incorrect prepared statement index SVN-Revision: 60
cadsrbulkloader/src/main/java/gov/nih/nci/ncicb/cadsr/bulkloader/util/BulkLoaderUnclassifier.java
bug fix for incorrect prepared statement index
<ide><path>adsrbulkloader/src/main/java/gov/nih/nci/ncicb/cadsr/bulkloader/util/BulkLoaderUnclassifier.java <ide> package gov.nih.nci.ncicb.cadsr.bulkloader.util; <ide> <del>import gov.nih.nci.ncicb.cadsr.bulkloader.beans.LoadProperties; <ide> import gov.nih.nci.ncicb.cadsr.bulkloader.beans.UnloadProperties; <ide> <ide> import java.sql.PreparedStatement; <ide> log.info("Unclassifying Admin Components..."); <ide> deleteAdminComponentCSCSI(); <ide> } <add> log.info("Unclassification complete!"); <ide> } <ide> <ide> private void init(UnloadProperties unloadProperties) { <ide> for (int i=0;i<args.length;i++) { <ide> Object o = args[i]; <ide> if (o instanceof String) { <del> ps.setString(i, (String) o); <add> ps.setString(i+1, (String) o); <ide> } <ide> else if (o instanceof Integer) { <del> ps.setInt(i, (Integer) o); <add> ps.setInt(i+1, (Integer) o); <ide> } <ide> else if (o instanceof Double) { <del> ps.setDouble(i, (Double) o); <add> ps.setDouble(i+1, (Double) o); <ide> } <ide> } <ide>
Java
bsd-2-clause
739984225f1074cdeb27efa84539472b45be2d69
0
edgehosting/jira-dvcs-connector,edgehosting/jira-dvcs-connector,markphip/testing,markphip/testing,edgehosting/jira-dvcs-connector,markphip/testing
package com.atlassian.jira.plugins.dvcs.pageobjects.component; import com.atlassian.pageobjects.elements.PageElement; import org.openqa.selenium.By; import static com.atlassian.pageobjects.elements.query.Conditions.and; import static com.atlassian.pageobjects.elements.query.Poller.by; import static com.atlassian.pageobjects.elements.query.Poller.waitUntil; import static com.atlassian.pageobjects.elements.query.Poller.waitUntilTrue; import static java.util.concurrent.TimeUnit.SECONDS; import static org.hamcrest.Matchers.is; public class RepositoryDiv { private final PageElement rootElement; private final PageElement syncRadio; public RepositoryDiv(PageElement rootElement) { this.rootElement = rootElement; this.syncRadio = rootElement != null? rootElement.find(By.className("radio")) : null; } public String getMessage() { return rootElement.find(By.xpath("td[3]/div")).getText(); } public String getRepositoryName() { return rootElement.find(By.xpath("td[2]/a")).getText(); } public PageElement getSyncIcon() { return rootElement.find(By.xpath("td[4]//span")); } public String getElementId() { return rootElement.getAttribute("id"); } public String getRepositoryId() { return parseRepositoryId(getElementId()); } public String parseRepositoryId(String elementId) { return elementId.substring(elementId.lastIndexOf("-") + 1); } public void enableSync() { if (syncRadio != null) { waitUntilTrue("Sync radio should always be enabled", syncRadio.timed().isEnabled()); if (!syncRadio.isSelected()) { syncRadio.click(); waitUntilTrue(syncRadio.timed().isSelected()); } } } public void sync() { final PageElement syncIcon = getSyncIcon(); waitUntilTrue(and(syncIcon.timed().isPresent(), syncIcon.timed().isEnabled(), syncIcon.timed().isVisible())); syncIcon.click(); waitUntilTrue(and(syncIcon.timed().isPresent(), syncIcon.timed().isVisible(), syncIcon.timed().hasClass("running"))); waitUntil(syncIcon.timed().hasClass("running"), is(false), by(60, SECONDS)); } }
jira-dvcs-connector-pageobjects/src/main/java/com/atlassian/jira/plugins/dvcs/pageobjects/component/RepositoryDiv.java
package com.atlassian.jira.plugins.dvcs.pageobjects.component; import com.atlassian.pageobjects.elements.PageElement; import org.openqa.selenium.By; import static com.atlassian.pageobjects.elements.query.Conditions.and; import static com.atlassian.pageobjects.elements.query.Poller.by; import static com.atlassian.pageobjects.elements.query.Poller.waitUntil; import static com.atlassian.pageobjects.elements.query.Poller.waitUntilTrue; import static com.atlassian.pageobjects.elements.timeout.TimeoutType.AJAX_ACTION; import static java.util.concurrent.TimeUnit.SECONDS; import static org.hamcrest.Matchers.is; public class RepositoryDiv { private final PageElement rootElement; private final PageElement syncRadio; public RepositoryDiv(PageElement rootElement) { this.rootElement = rootElement; this.syncRadio = rootElement != null? rootElement.find(By.className("radio")) : null; } public String getMessage() { return rootElement.find(By.xpath("td[3]/div")).getText(); } public String getRepositoryName() { return rootElement.find(By.xpath("td[2]/a")).getText(); } public PageElement getSyncIcon() { return rootElement.find(By.xpath("td[4]//span")); } public String getElementId() { return rootElement.getAttribute("id"); } public String getRepositoryId() { return parseRepositoryId(getElementId()); } public String parseRepositoryId(String elementId) { return elementId.substring(elementId.lastIndexOf("-") + 1); } public void enableSync() { if (syncRadio != null) { waitUntilTrue("Sync radio should always be enabled", syncRadio.timed().isEnabled()); if (!syncRadio.isSelected()) { syncRadio.click(); waitUntilTrue(syncRadio.timed().isSelected()); } } } public void sync() { final PageElement syncIcon = getSyncIcon(); waitUntilTrue(and(syncIcon.withTimeout(AJAX_ACTION).timed().isPresent(), syncIcon.withTimeout(AJAX_ACTION).timed().isEnabled(), syncIcon.withTimeout(AJAX_ACTION).timed().isVisible())); syncIcon.click(); waitUntilTrue(and(syncIcon.timed().isPresent(), 
syncIcon.timed().isVisible(), syncIcon.timed().hasClass("running"))); waitUntil(syncIcon.timed().hasClass("running"), is(false), by(60, SECONDS)); } }
AJAX_ACTION is only 10 seconds while default is 30, so restore default
jira-dvcs-connector-pageobjects/src/main/java/com/atlassian/jira/plugins/dvcs/pageobjects/component/RepositoryDiv.java
AJAX_ACTION is only 10 seconds while default is 30, so restore default
<ide><path>ira-dvcs-connector-pageobjects/src/main/java/com/atlassian/jira/plugins/dvcs/pageobjects/component/RepositoryDiv.java <ide> import static com.atlassian.pageobjects.elements.query.Poller.by; <ide> import static com.atlassian.pageobjects.elements.query.Poller.waitUntil; <ide> import static com.atlassian.pageobjects.elements.query.Poller.waitUntilTrue; <del>import static com.atlassian.pageobjects.elements.timeout.TimeoutType.AJAX_ACTION; <ide> import static java.util.concurrent.TimeUnit.SECONDS; <ide> import static org.hamcrest.Matchers.is; <ide> <ide> public void sync() <ide> { <ide> final PageElement syncIcon = getSyncIcon(); <del> waitUntilTrue(and(syncIcon.withTimeout(AJAX_ACTION).timed().isPresent(), <del> syncIcon.withTimeout(AJAX_ACTION).timed().isEnabled(), <del> syncIcon.withTimeout(AJAX_ACTION).timed().isVisible())); <add> waitUntilTrue(and(syncIcon.timed().isPresent(), syncIcon.timed().isEnabled(), syncIcon.timed().isVisible())); <ide> syncIcon.click(); <ide> waitUntilTrue(and(syncIcon.timed().isPresent(), syncIcon.timed().isVisible(), syncIcon.timed().hasClass("running"))); <ide> waitUntil(syncIcon.timed().hasClass("running"), is(false), by(60, SECONDS));
Java
apache-2.0
ed528f37f4f812f3fd6e6056567a77794be86efb
0
katre/bazel,perezd/bazel,twitter-forks/bazel,bazelbuild/bazel,ButterflyNetwork/bazel,akira-baruah/bazel,bazelbuild/bazel,ulfjack/bazel,bazelbuild/bazel,ButterflyNetwork/bazel,perezd/bazel,ulfjack/bazel,davidzchen/bazel,twitter-forks/bazel,werkt/bazel,meteorcloudy/bazel,ButterflyNetwork/bazel,safarmer/bazel,katre/bazel,cushon/bazel,meteorcloudy/bazel,werkt/bazel,davidzchen/bazel,katre/bazel,davidzchen/bazel,dslomov/bazel,twitter-forks/bazel,werkt/bazel,perezd/bazel,meteorcloudy/bazel,meteorcloudy/bazel,dslomov/bazel-windows,safarmer/bazel,perezd/bazel,ButterflyNetwork/bazel,akira-baruah/bazel,perezd/bazel,dslomov/bazel-windows,dslomov/bazel,ulfjack/bazel,werkt/bazel,davidzchen/bazel,twitter-forks/bazel,safarmer/bazel,werkt/bazel,cushon/bazel,dslomov/bazel,bazelbuild/bazel,cushon/bazel,meteorcloudy/bazel,dslomov/bazel-windows,dslomov/bazel-windows,dslomov/bazel-windows,meteorcloudy/bazel,bazelbuild/bazel,akira-baruah/bazel,twitter-forks/bazel,dslomov/bazel,dslomov/bazel,katre/bazel,davidzchen/bazel,dslomov/bazel,perezd/bazel,twitter-forks/bazel,akira-baruah/bazel,perezd/bazel,ulfjack/bazel,werkt/bazel,ulfjack/bazel,akira-baruah/bazel,ButterflyNetwork/bazel,twitter-forks/bazel,dslomov/bazel,katre/bazel,cushon/bazel,cushon/bazel,ulfjack/bazel,davidzchen/bazel,davidzchen/bazel,safarmer/bazel,cushon/bazel,ulfjack/bazel,safarmer/bazel,bazelbuild/bazel,katre/bazel,safarmer/bazel,akira-baruah/bazel,dslomov/bazel-windows,meteorcloudy/bazel,ButterflyNetwork/bazel
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.objc; import static com.google.common.collect.ImmutableSortedSet.toImmutableSortedSet; import static com.google.devtools.build.lib.packages.ImplicitOutputsFunction.fromTemplates; import static com.google.devtools.build.lib.rules.cpp.Link.LINK_LIBRARY_FILETYPES; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.DEFINE; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.DYNAMIC_FRAMEWORK_FILE; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.FORCE_LOAD_LIBRARY; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.FRAMEWORK_SEARCH_PATHS; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.HEADER; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.IMPORTED_LIBRARY; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.INCLUDE; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.INCLUDE_SYSTEM; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.LIBRARY; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.LINK_INPUTS; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.SDK_DYLIB; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.SDK_FRAMEWORK; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.STATIC_FRAMEWORK_FILE; 
import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.COMPILABLE_SRCS_TYPE; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.HEADERS; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.NON_ARC_SRCS_TYPE; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.PRECOMPILED_SRCS_TYPE; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.SRCS_TYPE; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.STRIP; import static java.nio.charset.StandardCharsets.ISO_8859_1; import static java.util.Comparator.naturalOrder; import static java.util.stream.Collectors.toCollection; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.collect.Streams; import com.google.devtools.build.lib.actions.ActionAnalysisMetadata; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.CommandLine; import com.google.devtools.build.lib.actions.CommandLineExpansionException; import com.google.devtools.build.lib.actions.ParamFileInfo; import com.google.devtools.build.lib.actions.ParameterFile; import com.google.devtools.build.lib.analysis.AnalysisEnvironment; import com.google.devtools.build.lib.analysis.AnalysisUtils; import com.google.devtools.build.lib.analysis.FilesToRunProvider; import 
com.google.devtools.build.lib.analysis.PrerequisiteArtifacts; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.VectorArg; import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction; import com.google.devtools.build.lib.analysis.actions.SpawnAction; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget.Mode; import com.google.devtools.build.lib.analysis.test.InstrumentedFilesCollector; import com.google.devtools.build.lib.analysis.test.InstrumentedFilesCollector.InstrumentationSpec; import com.google.devtools.build.lib.analysis.test.InstrumentedFilesCollector.LocalMetadataCollector; import com.google.devtools.build.lib.analysis.test.InstrumentedFilesInfo; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.packages.BuildType; import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SafeImplicitOutputsFunction; import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException; import com.google.devtools.build.lib.packages.TargetUtils; import com.google.devtools.build.lib.rules.apple.AppleCommandLineOptions.AppleBitcodeMode; import com.google.devtools.build.lib.rules.apple.AppleConfiguration; import com.google.devtools.build.lib.rules.apple.XcodeConfig; import com.google.devtools.build.lib.rules.apple.XcodeConfigProvider; import com.google.devtools.build.lib.rules.cpp.CcCommon; import com.google.devtools.build.lib.rules.cpp.CcCompilationContext; import 
com.google.devtools.build.lib.rules.cpp.CcCompilationHelper; import com.google.devtools.build.lib.rules.cpp.CcCompilationHelper.CompilationInfo; import com.google.devtools.build.lib.rules.cpp.CcCompilationOutputs; import com.google.devtools.build.lib.rules.cpp.CcInfo; import com.google.devtools.build.lib.rules.cpp.CcLinkingHelper; import com.google.devtools.build.lib.rules.cpp.CcToolchain; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.CollidingProvidesException; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.FeatureConfiguration; import com.google.devtools.build.lib.rules.cpp.CcToolchainProvider; import com.google.devtools.build.lib.rules.cpp.CcToolchainVariables.VariablesExtension; import com.google.devtools.build.lib.rules.cpp.CppCompileAction; import com.google.devtools.build.lib.rules.cpp.CppConfiguration; import com.google.devtools.build.lib.rules.cpp.CppFileTypes; import com.google.devtools.build.lib.rules.cpp.CppHelper; import com.google.devtools.build.lib.rules.cpp.CppLinkAction; import com.google.devtools.build.lib.rules.cpp.CppLinkActionBuilder; import com.google.devtools.build.lib.rules.cpp.CppModuleMap; import com.google.devtools.build.lib.rules.cpp.CppModuleMapAction; import com.google.devtools.build.lib.rules.cpp.CppRuleClasses; import com.google.devtools.build.lib.rules.cpp.FdoContext; import com.google.devtools.build.lib.rules.cpp.IncludeProcessing; import com.google.devtools.build.lib.rules.cpp.IncludeScanning; import com.google.devtools.build.lib.rules.cpp.Link.LinkTargetType; import com.google.devtools.build.lib.rules.cpp.Link.LinkingMode; import com.google.devtools.build.lib.rules.cpp.NoProcessing; import com.google.devtools.build.lib.rules.cpp.PrecompiledFiles; import com.google.devtools.build.lib.rules.cpp.UmbrellaHeaderAction; import com.google.devtools.build.lib.rules.objc.ObjcProvider.Flag; import com.google.devtools.build.lib.rules.objc.ObjcVariablesExtension.VariableCategory; import 
com.google.devtools.build.lib.util.FileTypeSet; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.vfs.PathFragment; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.stream.Stream; import javax.annotation.Nullable; /** * Support for rules that compile sources. Provides ways to determine files that should be output, * registering Xcode settings and generating the various actions that might be needed for * compilation. * * <p>A subclass should express a particular strategy for compile and link action registration. * Subclasses should implement the API without adding new visible methods - rule implementations * should be able to use a {@link CompilationSupport} instance to compile and link source without * knowing the subclass being used. * * <p>Methods on this class can be called in any order without impacting the result. */ public class CompilationSupport { @VisibleForTesting static final String OBJC_MODULE_CACHE_DIR_NAME = "_objc_module_cache"; @VisibleForTesting static final String MODULES_CACHE_PATH_WARNING = "setting '-fmodules-cache-path' manually in copts is unsupported"; @VisibleForTesting static final String ABSOLUTE_INCLUDES_PATH_FORMAT = "The path '%s' is absolute, but only relative paths are allowed."; // Flags for clang 6.1(xcode 6.4) @VisibleForTesting static final ImmutableList<String> CLANG_GCOV_COVERAGE_FLAGS = ImmutableList.of("-fprofile-arcs", "-ftest-coverage"); @VisibleForTesting static final ImmutableList<String> CLANG_LLVM_COVERAGE_FLAGS = ImmutableList.of("-fprofile-instr-generate", "-fcoverage-mapping"); // These are added by Xcode when building, because the simulator is built on OSX // frameworks so we aim compile to match the OSX objc runtime. 
@VisibleForTesting static final ImmutableList<String> SIMULATOR_COMPILE_FLAGS = ImmutableList.of( "-fexceptions", "-fasm-blocks", "-fobjc-abi-version=2", "-fobjc-legacy-dispatch"); /** * Frameworks implicitly linked to iOS, watchOS, and tvOS binaries when using legacy compilation. */ @VisibleForTesting static final ImmutableList<SdkFramework> AUTOMATIC_SDK_FRAMEWORKS = ImmutableList.of(new SdkFramework("Foundation"), new SdkFramework("UIKit")); /** Selects cc libraries that have alwayslink=1. */ private static final Predicate<Artifact> ALWAYS_LINKED_CC_LIBRARY = input -> LINK_LIBRARY_FILETYPES.matches(input.getFilename()); private static final String OBJC_MODULE_FEATURE_NAME = "use_objc_modules"; private static final String NO_ENABLE_MODULES_FEATURE_NAME = "no_enable_modules"; private static final String DEAD_STRIP_FEATURE_NAME = "dead_strip"; /** * Enabled if this target's rule is not a test rule. Binary stripping should not be applied in the * link step. TODO(b/36562173): Replace this behavior with a condition on bundle creation. * * <p>Note that the crosstool does not support feature negation in FlagSet.with_feature, which is * the mechanism used to condition linker arguments here. Therefore, we expose * "is_not_test_target" instead of the more intuitive "is_test_target". */ private static final String IS_NOT_TEST_TARGET_FEATURE_NAME = "is_not_test_target"; /** Enabled if this target generates debug symbols in a dSYM file. */ private static final String GENERATE_DSYM_FILE_FEATURE_NAME = "generate_dsym_file"; /** * Enabled if this target does not generate debug symbols. * * <p>Note that the crosstool does not support feature negation in FlagSet.with_feature, which is * the mechanism used to condition linker arguments here. 
Therefore, we expose * "no_generate_debug_symbols" in addition to "generate_dsym_file" */ private static final String NO_GENERATE_DEBUG_SYMBOLS_FEATURE_NAME = "no_generate_debug_symbols"; private static final String GENERATE_LINKMAP_FEATURE_NAME = "generate_linkmap"; private static final String XCODE_VERSION_FEATURE_NAME_PREFIX = "xcode_"; /** Enabled if this target has objc sources in its transitive closure. */ private static final String CONTAINS_OBJC = "contains_objc_sources"; private static final ImmutableList<String> ACTIVATED_ACTIONS = ImmutableList.of( "objc-compile", "objc++-compile", "objc-archive", "objc-fully-link", "objc-executable", "objc++-executable", "assemble", "preprocess-assemble", "c-compile", "c++-compile"); /** The kind of include processing to use. */ enum IncludeProcessingType { HEADER_THINNING, INCLUDE_SCANNING, NO_PROCESSING; } /** Returns the location of the xcrunwrapper tool. */ public static final FilesToRunProvider xcrunwrapper(RuleContext ruleContext) { return ruleContext.getExecutablePrerequisite("$xcrunwrapper", Mode.HOST); } /** Returns the location of the libtool tool. */ public static final FilesToRunProvider libtool(RuleContext ruleContext) { return ruleContext.getExecutablePrerequisite(ObjcRuleClasses.LIBTOOL_ATTRIBUTE, Mode.HOST); } /** * Files which can be instrumented along with the attributes in which they may occur and the * attributes along which they are propagated from dependencies (via {@link * InstrumentedFilesInfo}). */ private static final InstrumentationSpec INSTRUMENTATION_SPEC = new InstrumentationSpec( FileTypeSet.of(ObjcRuleClasses.NON_CPP_SOURCES, ObjcRuleClasses.CPP_SOURCES, HEADERS)) .withSourceAttributes("srcs", "non_arc_srcs", "hdrs") .withDependencyAttributes("deps", "data", "binary", "xctest_app"); /** Defines a library that contains the transitive closure of dependencies. 
*/ public static final SafeImplicitOutputsFunction FULLY_LINKED_LIB = fromTemplates("%{name}_fully_linked.a"); /** * Returns additional inputs to include processing, outside of the headers provided by * ObjProvider. */ private Iterable<Artifact> getExtraIncludeProcessingInputs( Collection<Artifact> privateHdrs, Artifact pchHdr) { Iterable<Artifact> extraInputs = privateHdrs; if (pchHdr != null) { extraInputs = Iterables.concat(extraInputs, ImmutableList.of(pchHdr)); } return extraInputs; } /** * Create and return the include processing to be used. Only HeaderThinning uses potentialInputs. */ private IncludeProcessing createIncludeProcessing(Iterable<Artifact> potentialInputs) { switch (includeProcessingType) { case HEADER_THINNING: return new HeaderThinning(potentialInputs); case INCLUDE_SCANNING: return IncludeScanning.INSTANCE; default: return NoProcessing.INSTANCE; } } private CompilationInfo compile( ObjcProvider objcProvider, VariablesExtension extension, ExtraCompileArgs extraCompileArgs, CcToolchainProvider ccToolchain, FdoContext fdoContext, Iterable<PathFragment> priorityHeaders, Collection<Artifact> sources, Collection<Artifact> privateHdrs, Collection<Artifact> publicHdrs, Collection<Artifact> dependentGeneratedHdrs, Artifact pchHdr, // TODO(b/70777494): Find out how deps get used and remove if not needed. Iterable<? 
extends TransitiveInfoCollection> deps, ObjcCppSemantics semantics, String purpose, boolean generateModuleMap) throws RuleErrorException, InterruptedException { CcCompilationHelper result = new CcCompilationHelper( ruleContext, ruleContext, ruleContext.getLabel(), CppHelper.getGrepIncludes(ruleContext), semantics, getFeatureConfiguration(ruleContext, ccToolchain, buildConfiguration, objcProvider), CcCompilationHelper.SourceCategory.CC_AND_OBJC, ccToolchain, fdoContext, buildConfiguration, TargetUtils.getExecutionInfo( ruleContext.getRule(), ruleContext.isAllowTagsPropagation())) .addSources(sources) .addPrivateHeaders(privateHdrs) .addDefines(objcProvider.get(DEFINE)) .addPublicHeaders(publicHdrs) .addPrivateHeadersUnchecked(dependentGeneratedHdrs) .addCcCompilationContexts( Streams.stream(AnalysisUtils.getProviders(deps, CcInfo.PROVIDER)) .map(CcInfo::getCcCompilationContext) .collect(ImmutableList.toImmutableList())) .setCopts( ImmutableList.<String>builder() .addAll(getCompileRuleCopts()) .addAll( ruleContext .getFragment(ObjcConfiguration.class) .getCoptsForCompilationMode()) .addAll(extraCompileArgs) .build()) .addFrameworkIncludeDirs(frameworkHeaderSearchPathFragments(objcProvider)) .addIncludeDirs(priorityHeaders) .addIncludeDirs(objcProvider.get(INCLUDE)) .addSystemIncludeDirs(objcProvider.get(INCLUDE_SYSTEM)) .setCppModuleMap(intermediateArtifacts.moduleMap()) .setPropagateModuleMapToCompileAction(false) .addVariableExtension(extension) .setPurpose(purpose) .addQuoteIncludeDirs( ObjcCommon.userHeaderSearchPaths(objcProvider, ruleContext.getConfiguration())) .setCodeCoverageEnabled(CcCompilationHelper.isCodeCoverageEnabled(ruleContext)) .setHeadersCheckingMode(semantics.determineHeadersCheckingMode(ruleContext)); if (pchHdr != null) { result.addNonModuleMapHeader(pchHdr); } if (getCustomModuleMap(ruleContext).isPresent() || !generateModuleMap) { result.doNotGenerateModuleMap(); } return result.compile(); } private Pair<CcCompilationOutputs, 
ImmutableMap<String, NestedSet<Artifact>>> ccCompileAndLink( ObjcProvider objcProvider, CompilationArtifacts compilationArtifacts, ObjcVariablesExtension.Builder extensionBuilder, ExtraCompileArgs extraCompileArgs, CcToolchainProvider ccToolchain, FdoContext fdoContext, Iterable<PathFragment> priorityHeaders, LinkTargetType linkType, Artifact linkActionInput) throws RuleErrorException, InterruptedException { PrecompiledFiles precompiledFiles = new PrecompiledFiles(ruleContext); Collection<Artifact> arcSources = ImmutableSortedSet.copyOf(compilationArtifacts.getSrcs()); Collection<Artifact> nonArcSources = ImmutableSortedSet.copyOf(compilationArtifacts.getNonArcSrcs()); Collection<Artifact> privateHdrs = ImmutableSortedSet.copyOf(compilationArtifacts.getPrivateHdrs()); Collection<Artifact> publicHdrs = Stream.concat( Streams.stream(attributes.hdrs()), Streams.stream(compilationArtifacts.getAdditionalHdrs())) .collect(toImmutableSortedSet(naturalOrder())); // This is a hack to inject generated headers into the action graph for include scanning. This // is supposed to be done via the compilation prerequisite middleman artifact of dependent // CcCompilationContexts, but ObjcProvider does not propagate that. This issue will go away // when we finish migrating the compile info in ObjcProvider to CcCompilationContext. // // To limit the extra work we're adding, we only add what is required, i.e. the // generated headers. Collection<Artifact> dependentGeneratedHdrs = (includeProcessingType == IncludeProcessingType.INCLUDE_SCANNING) ? objcProvider.getGeneratedHeaderList() : ImmutableList.of(); Artifact pchHdr = getPchFile().orNull(); Iterable<? 
extends TransitiveInfoCollection> deps = ruleContext.getPrerequisites("deps", Mode.TARGET); ObjcCppSemantics semantics = createObjcCppSemantics(objcProvider, privateHdrs, pchHdr); String purpose = String.format("%s_objc_arc", semantics.getPurpose()); extensionBuilder.setArcEnabled(true); CompilationInfo objcArcCompilationInfo = compile( objcProvider, extensionBuilder.build(), extraCompileArgs, ccToolchain, fdoContext, priorityHeaders, arcSources, privateHdrs, publicHdrs, dependentGeneratedHdrs, pchHdr, deps, semantics, purpose, /* generateModuleMap= */ true); purpose = String.format("%s_non_objc_arc", semantics.getPurpose()); extensionBuilder.setArcEnabled(false); CompilationInfo nonObjcArcCompilationInfo = compile( objcProvider, extensionBuilder.build(), extraCompileArgs, ccToolchain, fdoContext, priorityHeaders, nonArcSources, privateHdrs, publicHdrs, dependentGeneratedHdrs, pchHdr, deps, semantics, purpose, // Only generate the module map once (see above) and re-use it here. /* generateModuleMap= */ false); FeatureConfiguration featureConfiguration = getFeatureConfiguration(ruleContext, ccToolchain, buildConfiguration, objcProvider); CcLinkingHelper resultLink = new CcLinkingHelper( ruleContext, ruleContext.getLabel(), ruleContext, ruleContext, semantics, featureConfiguration, ccToolchain, fdoContext, buildConfiguration, ruleContext.getFragment(CppConfiguration.class), ruleContext.getSymbolGenerator(), TargetUtils.getExecutionInfo( ruleContext.getRule(), ruleContext.isAllowTagsPropagation())) .setGrepIncludes(CppHelper.getGrepIncludes(ruleContext)) .setIsStampingEnabled(AnalysisUtils.isStampingEnabled(ruleContext)) .setTestOrTestOnlyTarget(ruleContext.isTestTarget() || ruleContext.isTestOnlyTarget()) .addCcLinkingContexts( CppHelper.getLinkingContextsFromDeps( ImmutableList.copyOf(ruleContext.getPrerequisites("deps", Mode.TARGET)))) .setLinkedArtifactNameSuffix(intermediateArtifacts.archiveFileNameSuffix()) .setNeverLink(true) 
.addVariableExtension(extensionBuilder.build()); if (linkType != null) { resultLink.setStaticLinkType(linkType); } if (linkActionInput != null) { resultLink.addLinkActionInput(linkActionInput); } CcCompilationContext.Builder ccCompilationContextBuilder = CcCompilationContext.builder( ruleContext, ruleContext.getConfiguration(), ruleContext.getLabel()); ccCompilationContextBuilder.mergeDependentCcCompilationContexts( Arrays.asList( objcArcCompilationInfo.getCcCompilationContext(), nonObjcArcCompilationInfo.getCcCompilationContext())); ccCompilationContextBuilder.setPurpose( String.format("%s_merged_arc_non_arc_objc", semantics.getPurpose())); ccCompilationContextBuilder.addQuoteIncludeDirs( ObjcCommon.userHeaderSearchPaths(objcProvider, ruleContext.getConfiguration())); CcCompilationOutputs precompiledFilesObjects = CcCompilationOutputs.builder() .addObjectFiles(precompiledFiles.getObjectFiles(/* usePic= */ false)) .addPicObjectFiles(precompiledFiles.getObjectFiles(/* usePic= */ true)) .build(); CcCompilationOutputs.Builder compilationOutputsBuilder = CcCompilationOutputs.builder() .merge(objcArcCompilationInfo.getCcCompilationOutputs()) .merge(nonObjcArcCompilationInfo.getCcCompilationOutputs()) .merge(precompiledFilesObjects); compilationOutputsBuilder.merge(objcArcCompilationInfo.getCcCompilationOutputs()); compilationOutputsBuilder.merge(nonObjcArcCompilationInfo.getCcCompilationOutputs()); CcCompilationOutputs compilationOutputs = compilationOutputsBuilder.build(); if (!compilationOutputs.isEmpty()) { resultLink.link(compilationOutputs); } CppConfiguration cppConfiguration = ruleContext.getFragment(CppConfiguration.class); Map<String, NestedSet<Artifact>> arcOutputGroups = CcCompilationHelper.buildOutputGroupsForEmittingCompileProviders( objcArcCompilationInfo.getCcCompilationOutputs(), objcArcCompilationInfo.getCcCompilationContext(), cppConfiguration, ccToolchain, featureConfiguration, ruleContext); Map<String, NestedSet<Artifact>> nonArcOutputGroups = 
CcCompilationHelper.buildOutputGroupsForEmittingCompileProviders( nonObjcArcCompilationInfo.getCcCompilationOutputs(), nonObjcArcCompilationInfo.getCcCompilationContext(), cppConfiguration, ccToolchain, featureConfiguration, ruleContext); Map<String, NestedSet<Artifact>> mergedOutputGroups = CcCommon.mergeOutputGroups(ImmutableList.of(arcOutputGroups, nonArcOutputGroups)); return new Pair<>(compilationOutputsBuilder.build(), ImmutableMap.copyOf(mergedOutputGroups)); } private ObjcCppSemantics createObjcCppSemantics( ObjcProvider objcProvider, Collection<Artifact> privateHdrs, Artifact pchHdr) { Iterable<Artifact> extraInputs = getExtraIncludeProcessingInputs(privateHdrs, pchHdr); return new ObjcCppSemantics( objcProvider, includeProcessingType, createIncludeProcessing(Iterables.concat(extraInputs, objcProvider.get(HEADER))), extraInputs, ruleContext.getFragment(ObjcConfiguration.class), intermediateArtifacts, buildConfiguration, attributes.enableModules()); } private FeatureConfiguration getFeatureConfiguration( RuleContext ruleContext, CcToolchainProvider ccToolchain, BuildConfiguration configuration, ObjcProvider objcProvider) { boolean isHost = ruleContext.getConfiguration().isHostConfiguration(); ImmutableSet.Builder<String> activatedCrosstoolSelectables = ImmutableSet.<String>builder() .addAll(ccToolchain.getFeatures().getDefaultFeaturesAndActionConfigs()) .addAll(ACTIVATED_ACTIONS) .addAll( ruleContext .getFragment(AppleConfiguration.class) .getBitcodeMode() .getFeatureNames()) // We create a module map by default to allow for Swift interop. .add(CppRuleClasses.MODULE_MAPS) .add(CppRuleClasses.COMPILE_ALL_MODULES) .add(CppRuleClasses.EXCLUDE_PRIVATE_HEADERS_IN_MODULE_MAPS) .add(CppRuleClasses.ONLY_DOTH_HEADERS_IN_MODULE_MAPS) .add(CppRuleClasses.DEPENDENCY_FILE) .add(CppRuleClasses.INCLUDE_PATHS) .add(isHost ? 
"host" : "nonhost") .add(configuration.getCompilationMode().toString()); if (configuration.getFragment(ObjcConfiguration.class).moduleMapsEnabled() && !getCustomModuleMap(ruleContext).isPresent()) { activatedCrosstoolSelectables.add(OBJC_MODULE_FEATURE_NAME); } if (!attributes.enableModules()) { activatedCrosstoolSelectables.add(NO_ENABLE_MODULES_FEATURE_NAME); } if (configuration.getFragment(ObjcConfiguration.class).shouldStripBinary()) { activatedCrosstoolSelectables.add(DEAD_STRIP_FEATURE_NAME); } if (getPchFile().isPresent()) { activatedCrosstoolSelectables.add("pch"); } if (!isTestRule) { activatedCrosstoolSelectables.add(IS_NOT_TEST_TARGET_FEATURE_NAME); } if (configuration.getFragment(ObjcConfiguration.class).generateDsym()) { activatedCrosstoolSelectables.add(GENERATE_DSYM_FILE_FEATURE_NAME); } else { activatedCrosstoolSelectables.add(NO_GENERATE_DEBUG_SYMBOLS_FEATURE_NAME); } if (configuration.getFragment(ObjcConfiguration.class).generateLinkmap()) { activatedCrosstoolSelectables.add(GENERATE_LINKMAP_FEATURE_NAME); } AppleBitcodeMode bitcodeMode = configuration.getFragment(AppleConfiguration.class).getBitcodeMode(); if (bitcodeMode != AppleBitcodeMode.NONE) { activatedCrosstoolSelectables.addAll(bitcodeMode.getFeatureNames()); } if (objcProvider.is(Flag.USES_OBJC)) { activatedCrosstoolSelectables.add(CONTAINS_OBJC); } // Add a feature identifying the Xcode version so CROSSTOOL authors can enable flags for // particular versions of Xcode. To ensure consistency across platforms, use exactly two // components in the version number. 
activatedCrosstoolSelectables.add(
        XCODE_VERSION_FEATURE_NAME_PREFIX
            + XcodeConfig.getXcodeConfigProvider(ruleContext)
                .getXcodeVersion()
                .toStringWithComponents(2));

    // Also honor features requested on the rule itself and coverage-related features.
    activatedCrosstoolSelectables.addAll(ruleContext.getFeatures());

    CppConfiguration cppConfiguration = ruleContext.getFragment(CppConfiguration.class);
    activatedCrosstoolSelectables.addAll(CcCommon.getCoverageFeatures(cppConfiguration));

    try {
      return ccToolchain
          .getFeatures()
          .getFeatureConfiguration(activatedCrosstoolSelectables.build());
    } catch (CollidingProvidesException e) {
      // Report colliding feature providers as a rule error and fall back to an empty
      // configuration rather than crashing the analysis.
      ruleContext.ruleError(e.getMessage());
      return FeatureConfiguration.EMPTY;
    }
  }

  /** Iterable wrapper providing strong type safety for arguments to binary linking. */
  static final class ExtraLinkArgs extends IterableWrapper<String> {
    ExtraLinkArgs(String... args) {
      super(args);
    }

    ExtraLinkArgs(Iterable<String> args) {
      super(args);
    }
  }

  /** Iterable wrapper providing strong type safety for extra compile flags. */
  static final class ExtraCompileArgs extends IterableWrapper<String> {
    // Sentinel meaning "no extra compile flags".
    static final ExtraCompileArgs NONE = new ExtraCompileArgs();

    ExtraCompileArgs(String... args) {
      super(args);
    }
  }

  // Warning/error format strings used by validateAttributes(); exposed for tests.
  @VisibleForTesting
  static final String FILE_IN_SRCS_AND_HDRS_WARNING_FORMAT =
      "File '%s' is in both srcs and hdrs.";

  @VisibleForTesting
  static final String FILE_IN_SRCS_AND_NON_ARC_SRCS_ERROR_FORMAT =
      "File '%s' is present in both srcs and non_arc_srcs which is forbidden.";

  @VisibleForTesting
  static final String BOTH_MODULE_NAME_AND_MODULE_MAP_SPECIFIED =
      "Specifying both module_name and module_map is invalid, please remove one of them.";

  // Compiler flags added to every compilation by default.
  static final ImmutableList<String> DEFAULT_COMPILER_FLAGS = ImmutableList.of("-DOS_IOS");

  /**
   * Set of {@link com.google.devtools.build.lib.util.FileType} of source artifacts that are
   * compatible with header thinning.
*/ private static final FileTypeSet SOURCES_FOR_HEADER_THINNING = FileTypeSet.of( CppFileTypes.OBJC_SOURCE, CppFileTypes.OBJCPP_SOURCE, CppFileTypes.CPP_SOURCE, CppFileTypes.C_SOURCE); /** Returns information about the given rule's compilation artifacts. */ // TODO(bazel-team): Remove this information from ObjcCommon and move it internal to this class. static CompilationArtifacts compilationArtifacts(RuleContext ruleContext) { return compilationArtifacts(ruleContext, ObjcRuleClasses.intermediateArtifacts(ruleContext)); } /** * Returns information about the given rule's compilation artifacts. Dependencies specified in the * current rule's attributes are obtained via {@code ruleContext}. Output locations are determined * using the given {@code intermediateArtifacts} object. The fact that these are distinct objects * allows the caller to generate compilation actions pertaining to a configuration separate from * the current rule's configuration. */ static CompilationArtifacts compilationArtifacts( RuleContext ruleContext, IntermediateArtifacts intermediateArtifacts) { PrerequisiteArtifacts srcs = ruleContext.getPrerequisiteArtifacts("srcs", Mode.TARGET).errorsForNonMatching(SRCS_TYPE); return new CompilationArtifacts.Builder() .addSrcs(srcs.filter(COMPILABLE_SRCS_TYPE).list()) .addNonArcSrcs( ruleContext .getPrerequisiteArtifacts("non_arc_srcs", Mode.TARGET) .errorsForNonMatching(NON_ARC_SRCS_TYPE) .list()) .addPrivateHdrs(srcs.filter(HEADERS).list()) .addPrecompiledSrcs(srcs.filter(PRECOMPILED_SRCS_TYPE).list()) .setIntermediateArtifacts(intermediateArtifacts) .build(); } /** Returns a list of framework header search path fragments. 
*/
  static ImmutableList<PathFragment> frameworkHeaderSearchPathFragments(ObjcProvider provider)
      throws InterruptedException {
    ImmutableList.Builder<PathFragment> searchPaths = new ImmutableList.Builder<>();
    // Each unique parent directory of the provider's framework search paths becomes a header
    // search path.
    return searchPaths
        .addAll(uniqueParentDirectories(provider.get(FRAMEWORK_SEARCH_PATHS)))
        .build();
  }

  /** Returns a list of framework header search paths. */
  static ImmutableList<String> frameworkHeaderSearchPaths(ObjcProvider provider)
      throws InterruptedException {
    ImmutableList.Builder<String> searchPaths = new ImmutableList.Builder<>();
    // Same paths as frameworkHeaderSearchPathFragments, rendered as strings for command lines.
    return searchPaths
        .addAll(
            Iterables.transform(
                frameworkHeaderSearchPathFragments(provider), PathFragment::getSafePathString))
        .build();
  }

  /** Returns a list of framework library search paths. */
  static ImmutableList<String> frameworkLibrarySearchPaths(ObjcProvider provider)
      throws InterruptedException {
    ImmutableList.Builder<String> searchPaths = new ImmutableList.Builder<>();
    return searchPaths
        // Add library search paths corresponding to custom (non-SDK) frameworks. For each framework
        // foo/bar.framework, include "foo" as a search path.
        .addAll(provider.staticFrameworkPaths())
        .addAll(provider.dynamicFrameworkPaths())
        .build();
  }

  // Rule context of the target being compiled.
  private final RuleContext ruleContext;
  // Configuration under which actions are registered; may differ from the rule's own
  // configuration.
  private final BuildConfiguration buildConfiguration;
  // Convenience handles to fragments of buildConfiguration.
  private final ObjcConfiguration objcConfiguration;
  private final AppleConfiguration appleConfiguration;
  // Compilation and linking flags derived from the rule's attributes.
  private final CompilationAttributes attributes;
  // Determines the names/locations of generated artifacts.
  private final IntermediateArtifacts intermediateArtifacts;
  // Mutable collectors populated during action registration, to be exported by the calling rule.
  private final Map<String, NestedSet<Artifact>> outputGroupCollector;
  private final ImmutableList.Builder<Artifact> objectFilesCollector;
  // C++ toolchain used when constructing command lines; may be inferred from the rule context.
  private final CcToolchainProvider toolchain;
  private final boolean isTestRule;
  // Whether the pch from the rule context is used in determining compilation actions.
  private final boolean usePch;
  // Strategy for computing extra compile-action inputs (header thinning / include scanning /
  // none).
  private final IncludeProcessingType includeProcessingType;

  /**
   * Creates a new compilation support for the given rule and build configuration.
* * <p>All actions will be created under the given build configuration, which may be different than * the current rule context configuration. * * <p>The compilation and linking flags will be retrieved from the given compilation attributes. * The names of the generated artifacts will be retrieved from the given intermediate artifacts. * * <p>By instantiating multiple compilation supports for the same rule but with intermediate * artifacts with different output prefixes, multiple archives can be compiled for the same rule * context. */ private CompilationSupport( RuleContext ruleContext, BuildConfiguration buildConfiguration, IntermediateArtifacts intermediateArtifacts, CompilationAttributes compilationAttributes, Map<String, NestedSet<Artifact>> outputGroupCollector, ImmutableList.Builder<Artifact> objectFilesCollector, CcToolchainProvider toolchain, boolean isTestRule, boolean usePch) throws InterruptedException { this.ruleContext = ruleContext; this.buildConfiguration = buildConfiguration; this.objcConfiguration = buildConfiguration.getFragment(ObjcConfiguration.class); this.appleConfiguration = buildConfiguration.getFragment(AppleConfiguration.class); this.attributes = compilationAttributes; this.intermediateArtifacts = intermediateArtifacts; this.isTestRule = isTestRule; this.outputGroupCollector = outputGroupCollector; this.objectFilesCollector = objectFilesCollector; this.usePch = usePch; if (toolchain == null && ruleContext .attributes() .has(CcToolchain.CC_TOOLCHAIN_DEFAULT_ATTRIBUTE_NAME, BuildType.LABEL)) { toolchain = CppHelper.getToolchainUsingDefaultCcToolchainAttribute(ruleContext); } this.toolchain = toolchain; if (objcConfiguration.shouldScanIncludes()) { includeProcessingType = IncludeProcessingType.INCLUDE_SCANNING; } else if (isHeaderThinningEnabled()) { includeProcessingType = IncludeProcessingType.HEADER_THINNING; } else { includeProcessingType = IncludeProcessingType.NO_PROCESSING; } } /** Builder for {@link CompilationSupport} */ public static 
class Builder { private RuleContext ruleContext; private BuildConfiguration buildConfiguration; private IntermediateArtifacts intermediateArtifacts; private CompilationAttributes compilationAttributes; private Map<String, NestedSet<Artifact>> outputGroupCollector; private ImmutableList.Builder<Artifact> objectFilesCollector; private CcToolchainProvider toolchain; private boolean isTestRule = false; private boolean usePch = true; /** Sets the {@link RuleContext} for the calling target. */ public Builder setRuleContext(RuleContext ruleContext) { this.ruleContext = ruleContext; return this; } /** Sets the {@link BuildConfiguration} for the calling target. */ public Builder setConfig(BuildConfiguration buildConfiguration) { this.buildConfiguration = buildConfiguration; return this; } /** Sets {@link IntermediateArtifacts} for deriving artifact paths. */ public Builder setIntermediateArtifacts(IntermediateArtifacts intermediateArtifacts) { this.intermediateArtifacts = intermediateArtifacts; return this; } /** Sets {@link CompilationAttributes} for the calling target. */ public Builder setCompilationAttributes(CompilationAttributes compilationAttributes) { this.compilationAttributes = compilationAttributes; return this; } /** * Sets that this {@link CompilationSupport} will not use the pch from the rule context in * determining compilation actions. */ public Builder doNotUsePch() { this.usePch = false; return this; } /** Indicates that this CompilationSupport is for use in a test rule. */ public Builder setIsTestRule() { this.isTestRule = true; return this; } /** * Causes the provided map to be updated with output groups produced by compile action * registration. * * <p>This map is intended to be mutated by {@link * CompilationSupport#registerCompileAndArchiveActions}. The added output groups should be * exported by the calling rule class implementation. 
*/ public Builder setOutputGroupCollector(Map<String, NestedSet<Artifact>> outputGroupCollector) { this.outputGroupCollector = outputGroupCollector; return this; } /** * Set a collector for the object files produced by compile action registration. * * <p>The object files are intended to be added by {@link * CompilationSupport#registerCompileAndArchiveActions}. */ public Builder setObjectFilesCollector(ImmutableList.Builder<Artifact> objectFilesCollector) { this.objectFilesCollector = objectFilesCollector; return this; } /** * Sets {@link CcToolchainProvider} for the calling target. * * <p>This is needed if it can't correctly be inferred directly from the rule context. Setting * to null causes the default to be used as if this was never called. */ public Builder setToolchainProvider(CcToolchainProvider toolchain) { this.toolchain = toolchain; return this; } /** Returns a {@link CompilationSupport} instance. */ public CompilationSupport build() throws InterruptedException { Preconditions.checkNotNull(ruleContext, "CompilationSupport is missing RuleContext"); if (buildConfiguration == null) { buildConfiguration = ruleContext.getConfiguration(); } if (intermediateArtifacts == null) { intermediateArtifacts = ObjcRuleClasses.intermediateArtifacts(ruleContext, buildConfiguration); } if (compilationAttributes == null) { compilationAttributes = CompilationAttributes.Builder.fromRuleContext(ruleContext).build(); } if (outputGroupCollector == null) { outputGroupCollector = new TreeMap<>(); } if (objectFilesCollector == null) { objectFilesCollector = ImmutableList.builder(); } return new CompilationSupport( ruleContext, buildConfiguration, intermediateArtifacts, compilationAttributes, outputGroupCollector, objectFilesCollector, toolchain, isTestRule, usePch); } } /** * Returns a provider that collects this target's instrumented sources as well as those of its * dependencies. 
* * @param objectFiles the object files generated by this target * @return an instrumented files provider */ public InstrumentedFilesInfo getInstrumentedFilesProvider(ImmutableList<Artifact> objectFiles) { return InstrumentedFilesCollector.collect( ruleContext, INSTRUMENTATION_SPEC, new ObjcCoverageMetadataCollector(), objectFiles, NestedSetBuilder.<Artifact>emptySet(Order.STABLE_ORDER), // The COVERAGE_GCOV_PATH environment variable is added in TestSupport#getExtraProviders() NestedSetBuilder.<Pair<String, String>>emptySet(Order.COMPILE_ORDER), !isTestRule, /* reportedToActualSources= */ NestedSetBuilder.create(Order.STABLE_ORDER)); } /** * Validates compilation-related attributes on this rule. * * @return this compilation support * @throws RuleErrorException if there are attribute errors */ CompilationSupport validateAttributes() throws RuleErrorException { for (PathFragment absoluteInclude : Iterables.filter(attributes.includes(), PathFragment::isAbsolute)) { ruleContext.attributeError( "includes", String.format(ABSOLUTE_INCLUDES_PATH_FORMAT, absoluteInclude)); } if (ruleContext.attributes().has("srcs", BuildType.LABEL_LIST)) { ImmutableSet<Artifact> hdrsSet = ImmutableSet.copyOf(attributes.hdrs()); ImmutableSet<Artifact> srcsSet = ImmutableSet.copyOf(ruleContext.getPrerequisiteArtifacts("srcs", Mode.TARGET).list()); // Check for overlap between srcs and hdrs. for (Artifact header : Sets.intersection(hdrsSet, srcsSet)) { String path = header.getRootRelativePath().toString(); ruleContext.attributeWarning( "srcs", String.format(FILE_IN_SRCS_AND_HDRS_WARNING_FORMAT, path)); } // Check for overlap between srcs and non_arc_srcs. 
ImmutableSet<Artifact> nonArcSrcsSet = ImmutableSet.copyOf( ruleContext.getPrerequisiteArtifacts("non_arc_srcs", Mode.TARGET).list()); for (Artifact conflict : Sets.intersection(nonArcSrcsSet, srcsSet)) { String path = conflict.getRootRelativePath().toString(); ruleContext.attributeError( "srcs", String.format(FILE_IN_SRCS_AND_NON_ARC_SRCS_ERROR_FORMAT, path)); } } if (ruleContext.attributes().isAttributeValueExplicitlySpecified("module_name") && ruleContext.attributes().isAttributeValueExplicitlySpecified("module_map")) { ruleContext.attributeError("module_name", BOTH_MODULE_NAME_AND_MODULE_MAP_SPECIFIED); } ruleContext.assertNoErrors(); return this; } /** * Registers all actions necessary to compile this rule's sources and archive them. * * @param compilationArtifacts collection of artifacts required for the compilation * @param objcProvider provides all compiling and linking information to register these actions * @param toolchain the toolchain to be used in determining command lines * @return this compilation support * @throws RuleErrorException for invalid crosstool files */ CompilationSupport registerCompileAndArchiveActions( CompilationArtifacts compilationArtifacts, ObjcProvider objcProvider, CcToolchainProvider toolchain) throws RuleErrorException, InterruptedException { return registerCompileAndArchiveActions( compilationArtifacts, objcProvider, ExtraCompileArgs.NONE, ImmutableList.<PathFragment>of(), toolchain, toolchain.getFdoContext()); } /** * Registers all actions necessary to compile this rule's sources and archive them. 
   *
   * @param common common information about this rule and its dependencies
   * @return this compilation support
   * @throws RuleErrorException for invalid crosstool files
   */
  CompilationSupport registerCompileAndArchiveActions(ObjcCommon common)
      throws RuleErrorException, InterruptedException {
    // Convenience overload: no extra compile args, no priority headers.
    return registerCompileAndArchiveActions(
        common, ExtraCompileArgs.NONE, ImmutableList.<PathFragment>of());
  }

  /**
   * Registers all actions necessary to compile this rule's sources and archive them.
   *
   * @param common common information about this rule and its dependencies
   * @param priorityHeaders priority headers to be included before the dependency headers
   * @return this compilation support
   * @throws RuleErrorException for invalid crosstool files
   */
  CompilationSupport registerCompileAndArchiveActions(
      ObjcCommon common, Iterable<PathFragment> priorityHeaders)
      throws RuleErrorException, InterruptedException {
    // Convenience overload: no extra compile args.
    return registerCompileAndArchiveActions(common, ExtraCompileArgs.NONE, priorityHeaders);
  }

  /**
   * Registers all actions necessary to compile this rule's sources and archive them.
   *
   * @param compilationArtifacts collection of artifacts required for the compilation
   * @param objcProvider provides all compiling and linking information to register these actions
   * @param extraCompileArgs args to be added to compile actions
   * @param priorityHeaders priority headers to be included before the dependency headers
   * @param ccToolchain the cpp toolchain provider, may be null
   * @param fdoContext the cpp FDO support provider, may be null
   * @return this compilation support
   * @throws RuleErrorException for invalid crosstool files
   */
  CompilationSupport registerCompileAndArchiveActions(
      CompilationArtifacts compilationArtifacts,
      ObjcProvider objcProvider,
      ExtraCompileArgs extraCompileArgs,
      Iterable<PathFragment> priorityHeaders,
      @Nullable CcToolchainProvider ccToolchain,
      @Nullable FdoContext fdoContext)
      throws RuleErrorException, InterruptedException {
    // NOTE(review): parameters are annotated @Nullable but null is rejected here; the
    // annotations appear stale — confirm before relying on them.
    Preconditions.checkNotNull(ccToolchain);
    Preconditions.checkNotNull(fdoContext);
    ObjcVariablesExtension.Builder extension =
        new ObjcVariablesExtension.Builder()
            .setRuleContext(ruleContext)
            .setObjcProvider(objcProvider)
            .setCompilationArtifacts(compilationArtifacts)
            .setIntermediateArtifacts(intermediateArtifacts)
            .setConfiguration(buildConfiguration)
            .setFrameworkSearchPath(frameworkHeaderSearchPaths(objcProvider));

    Pair<CcCompilationOutputs, ImmutableMap<String, NestedSet<Artifact>>> compilationInfo;

    if (compilationArtifacts.getArchive().isPresent()) {
      // Archiving requested: compile and produce an OBJC_ARCHIVE, driven by an .objlist file
      // enumerating the object files to archive.
      Artifact objList = intermediateArtifacts.archiveObjList();

      extension.addVariableCategory(VariableCategory.ARCHIVE_VARIABLES);

      compilationInfo =
          ccCompileAndLink(
              objcProvider,
              compilationArtifacts,
              extension,
              extraCompileArgs,
              ccToolchain,
              fdoContext,
              priorityHeaders,
              LinkTargetType.OBJC_ARCHIVE,
              objList);

      // TODO(b/30783125): Signal the need for this action in the CROSSTOOL.
      registerObjFilelistAction(
          compilationInfo.getFirst().getObjectFiles(/* usePic= */ false), objList);
    } else {
      // No archive output: compile only (no link step).
      compilationInfo =
          ccCompileAndLink(
              objcProvider,
              compilationArtifacts,
              extension,
              extraCompileArgs,
              ccToolchain,
              fdoContext,
              priorityHeaders,
              /* linkType */ null,
              /* linkActionInput */ null);
    }

    // Accumulate results for later consumers (output groups, header scanning).
    objectFilesCollector.addAll(compilationInfo.getFirst().getObjectFiles(/* usePic= */ false));
    outputGroupCollector.putAll(compilationInfo.getSecond());

    registerHeaderScanningActions(compilationInfo.getFirst(), objcProvider, compilationArtifacts);

    return this;
  }

  /**
   * Registers all actions necessary to compile this rule's sources and archive them.
   *
   * @param common common information about this rule and its dependencies
   * @param extraCompileArgs args to be added to compile actions
   * @param priorityHeaders priority headers to be included before the dependency headers
   * @return this compilation support
   * @throws RuleErrorException for invalid crosstool files
   */
  CompilationSupport registerCompileAndArchiveActions(
      ObjcCommon common, ExtraCompileArgs extraCompileArgs, Iterable<PathFragment> priorityHeaders)
      throws RuleErrorException, InterruptedException {
    // No-op (still returns this) when the rule has no compilation artifacts.
    if (common.getCompilationArtifacts().isPresent()) {
      registerCompileAndArchiveActions(
          common.getCompilationArtifacts().get(),
          common.getObjcProvider(),
          extraCompileArgs,
          priorityHeaders,
          toolchain,
          toolchain.getFdoContext());
    }
    return this;
  }

  /**
   * Infers the {@link StrippingType} for the binary-strip action from the extra link args: the
   * presence of {@code -dynamiclib} or {@code -kext} selects the matching strip policy.
   */
  private StrippingType getStrippingType(ExtraLinkArgs extraLinkArgs) {
    if (Iterables.contains(extraLinkArgs, "-dynamiclib")) {
      return StrippingType.DYNAMIC_LIB;
    }
    if (Iterables.contains(extraLinkArgs, "-kext")) {
      return StrippingType.KERNEL_EXTENSION;
    }
    return StrippingType.DEFAULT;
  }

  /**
   * Registers any actions necessary to link this rule and its dependencies. Automatically infers
   * the toolchain from the configuration of this CompilationSupport - if a different toolchain is
   * required, use the custom toolchain override.
   *
   * <p>Dsym bundle is generated if {@link ObjcConfiguration#generateDsym()} is set.
   *
   * <p>When Bazel flags {@code --compilation_mode=opt} and {@code --objc_enable_binary_stripping}
   * are specified, additional optimizations will be performed on the linked binary: all-symbol
   * stripping (using {@code /usr/bin/strip}) and dead-code stripping (using linker flags: {@code
   * -dead_strip} and {@code -no_dead_strip_inits_and_terms}).
   *
   * @param objcProvider common information about this rule's attributes and its dependencies
   * @param j2ObjcMappingFileProvider contains mapping files for j2objc transpilation
   * @param j2ObjcEntryClassProvider contains j2objc entry class information for dead code removal
   * @param extraLinkArgs any additional arguments to pass to the linker
   * @param extraLinkInputs any additional input artifacts to pass to the link action
   * @return this compilation support
   */
  CompilationSupport registerLinkActions(
      ObjcProvider objcProvider,
      J2ObjcMappingFileProvider j2ObjcMappingFileProvider,
      J2ObjcEntryClassProvider j2ObjcEntryClassProvider,
      ExtraLinkArgs extraLinkArgs,
      Iterable<Artifact> extraLinkInputs,
      CcToolchainProvider toolchain)
      throws InterruptedException, RuleErrorException {
    // If J2ObjC dead-code removal applies, link against the pruned archives instead of the
    // originals.
    Iterable<Artifact> prunedJ2ObjcArchives =
        computeAndStripPrunedJ2ObjcArchives(
            j2ObjcEntryClassProvider, j2ObjcMappingFileProvider, objcProvider);
    ImmutableList<Artifact> bazelBuiltLibraries =
        Iterables.isEmpty(prunedJ2ObjcArchives)
            ? objcProvider.getObjcLibraries()
            : substituteJ2ObjcPrunedLibraries(objcProvider);

    Artifact inputFileList = intermediateArtifacts.linkerObjList();
    ImmutableSet<Artifact> forceLinkArtifacts = getForceLoadArtifacts(objcProvider);

    Iterable<Artifact> objFiles =
        Iterables.concat(
            bazelBuiltLibraries, objcProvider.get(IMPORTED_LIBRARY), objcProvider.getCcLibraries());
    // Clang loads archives specified in filelists and also specified as -force_load twice,
    // resulting in duplicate symbol errors unless they are deduped.
    objFiles = Iterables.filter(objFiles, Predicates.not(Predicates.in(forceLinkArtifacts)));

    registerObjFilelistAction(objFiles, inputFileList);

    // Use the Obj-C++ link type if any dependency uses C++.
    LinkTargetType linkType =
        objcProvider.is(Flag.USES_CPP)
            ? LinkTargetType.OBJCPP_EXECUTABLE
            : LinkTargetType.OBJC_EXECUTABLE;

    ObjcVariablesExtension.Builder extensionBuilder =
        new ObjcVariablesExtension.Builder()
            .setRuleContext(ruleContext)
            .setObjcProvider(objcProvider)
            .setConfiguration(buildConfiguration)
            .setIntermediateArtifacts(intermediateArtifacts)
            .setFrameworkNames(frameworkNames(objcProvider))
            .setFrameworkSearchPath(frameworkLibrarySearchPaths(objcProvider))
            .setLibraryNames(libraryNames(objcProvider))
            .setForceLoadArtifacts(getForceLoadArtifacts(objcProvider))
            .setAttributeLinkopts(attributes.linkopts())
            .addVariableCategory(VariableCategory.EXECUTABLE_LINKING_VARIABLES);

    Artifact binaryToLink = getBinaryToLink();

    FdoContext fdoContext = toolchain.getFdoContext();
    CppLinkActionBuilder executableLinkAction =
        new CppLinkActionBuilder(
                ruleContext,
                ruleContext,
                ruleContext.getLabel(),
                binaryToLink,
                ruleContext.getConfiguration(),
                toolchain,
                fdoContext,
                getFeatureConfiguration(ruleContext, toolchain, buildConfiguration, objcProvider),
                createObjcCppSemantics(
                    objcProvider, /* privateHdrs= */ ImmutableList.of(), /* pchHdr= */ null))
            .setGrepIncludes(CppHelper.getGrepIncludes(ruleContext))
            .setIsStampingEnabled(AnalysisUtils.isStampingEnabled(ruleContext))
            .setTestOrTestOnlyTarget(ruleContext.isTestOnlyTarget() || ruleContext.isTestTarget())
            .setMnemonic("ObjcLink")
            .addActionInputs(bazelBuiltLibraries)
            .addActionInputs(objcProvider.getCcLibraries())
            .addTransitiveActionInputs(objcProvider.get(IMPORTED_LIBRARY))
            .addTransitiveActionInputs(objcProvider.get(STATIC_FRAMEWORK_FILE))
            .addTransitiveActionInputs(objcProvider.get(DYNAMIC_FRAMEWORK_FILE))
            .addTransitiveActionInputs(objcProvider.get(LINK_INPUTS))
            .setLinkerFiles(toolchain.getLinkerFiles())
            .addActionInputs(prunedJ2ObjcArchives)
            .addActionInputs(extraLinkInputs)
            .addActionInput(inputFileList)
            .setLinkType(linkType)
            .setLinkingMode(LinkingMode.STATIC)
            .addLinkopts(ImmutableList.copyOf(extraLinkArgs));

    // Optional extra outputs of the link action, each paired with the matching variable
    // category so the crosstool templates can reference them.
    if (objcConfiguration.generateDsym()) {
      Artifact dsymSymbol =
          objcConfiguration.shouldStripBinary()
              ? intermediateArtifacts.dsymSymbolForUnstrippedBinary()
              : intermediateArtifacts.dsymSymbolForStrippedBinary();
      extensionBuilder
          .setDsymSymbol(dsymSymbol)
          .addVariableCategory(VariableCategory.DSYM_VARIABLES);
      executableLinkAction.addActionOutput(dsymSymbol);
    }

    if (objcConfiguration.generateLinkmap()) {
      Artifact linkmap = intermediateArtifacts.linkmap();
      extensionBuilder.setLinkmap(linkmap).addVariableCategory(VariableCategory.LINKMAP_VARIABLES);
      executableLinkAction.addActionOutput(linkmap);
    }

    if (appleConfiguration.getBitcodeMode() == AppleBitcodeMode.EMBEDDED) {
      Artifact bitcodeSymbolMap = intermediateArtifacts.bitcodeSymbolMap();
      extensionBuilder
          .setBitcodeSymbolMap(bitcodeSymbolMap)
          .addVariableCategory(VariableCategory.BITCODE_VARIABLES);
      executableLinkAction.addActionOutput(bitcodeSymbolMap);
    }

    executableLinkAction.addVariablesExtension(extensionBuilder.build());
    ruleContext.registerAction(executableLinkAction.build());

    if (objcConfiguration.shouldStripBinary()) {
      registerBinaryStripAction(binaryToLink, getStrippingType(extraLinkArgs));
    }

    return this;
  }

  /**
   * Returns the copts for the compile action in the current rule context (using a combination of
   * the rule's "copts" attribute as well as the current configuration copts).
   */
  private Iterable<String> getCompileRuleCopts() {
    List<String> copts =
        Stream.concat(objcConfiguration.getCopts().stream(), attributes.copts().stream())
            .collect(toCollection(ArrayList::new));

    for (String copt : copts) {
      if (copt.contains("-fmodules-cache-path")) {
        // Bazel decides on the cache path location; warn (don't fail) if the user set one.
        ruleContext.ruleWarning(MODULES_CACHE_PATH_WARNING);
      }
    }

    if (attributes.enableModules() && !getCustomModuleMap(ruleContext).isPresent()) {
      copts.add("-fmodules");
    }

    if (copts.contains("-fmodules")) {
      // If modules are enabled, clang caches module information. If unspecified, this is a
      // system-wide cache directory, which is a problem for remote executors which may run
      // multiple actions with different source trees that can't share this cache.
      // We thus set its path to the root of the genfiles directory.
      // Unfortunately, this cache contains non-hermetic information, thus we avoid declaring it as
      // an implicit output (as outputs must be hermetic).
      String cachePath =
          buildConfiguration.getGenfilesFragment() + "/" + OBJC_MODULE_CACHE_DIR_NAME;
      copts.add("-fmodules-cache-path=" + cachePath);
    }
    return copts;
  }

  /**
   * Registers an action that writes given set of object files to the given objList. This objList is
   * suitable to signal symbols to archive in a libtool archiving invocation.
   */
  private CompilationSupport registerObjFilelistAction(
      Iterable<Artifact> objFiles, Artifact objList) {
    ImmutableSet<Artifact> dedupedObjFiles = ImmutableSet.copyOf(objFiles);
    CustomCommandLine.Builder objFilesToLinkParam = new CustomCommandLine.Builder();
    ImmutableList.Builder<Artifact> treeObjFiles = new ImmutableList.Builder<>();

    for (Artifact objFile : dedupedObjFiles) {
      // If the obj file is a tree artifact, we need to expand it into the contained individual
      // files properly.
      if (objFile.isTreeArtifact()) {
        treeObjFiles.add(objFile);
        objFilesToLinkParam.addExpandedTreeArtifactExecPaths(objFile);
      } else {
        objFilesToLinkParam.addPath(objFile.getExecPath());
      }
    }

    ruleContext.registerAction(
        new ParameterFileWriteAction(
            ruleContext.getActionOwner(),
            treeObjFiles.build(),
            objList,
            objFilesToLinkParam.build(),
            ParameterFile.ParameterFileType.UNQUOTED,
            ISO_8859_1));
    return this;
  }

  /**
   * Registers an action to create an archive artifact by fully (statically) linking all transitive
   * dependencies of this rule.
   *
   * @param objcProvider provides all compiling and linking information to create this artifact
   * @param outputArchive the output artifact for this action
   */
  public CompilationSupport registerFullyLinkAction(
      ObjcProvider objcProvider, Artifact outputArchive)
      throws InterruptedException, RuleErrorException {
    // Convenience overload using this CompilationSupport's configured toolchain.
    return registerFullyLinkAction(
        objcProvider, outputArchive, toolchain, toolchain.getFdoContext());
  }

  /**
   * Registers an action to create an archive artifact by fully (statically) linking all transitive
   * dependencies of this rule.
   *
   * @param objcProvider provides all compiling and linking information to create this artifact
   * @param outputArchive the output artifact for this action
   * @param ccToolchain the cpp toolchain provider, may be null
   * @param fdoContext the cpp FDO support provider, may be null
   * @return this {@link CompilationSupport} instance
   */
  CompilationSupport registerFullyLinkAction(
      ObjcProvider objcProvider,
      Artifact outputArchive,
      @Nullable CcToolchainProvider ccToolchain,
      @Nullable FdoContext fdoContext)
      throws InterruptedException, RuleErrorException {
    // NOTE(review): as above, @Nullable contradicts the checkNotNull below — likely stale.
    Preconditions.checkNotNull(ccToolchain);
    Preconditions.checkNotNull(fdoContext);
    // Library identifier, e.g. "pkg/dir/libname" for //pkg/dir:name.
    PathFragment labelName = PathFragment.create(ruleContext.getLabel().getName());
    String libraryIdentifier =
        ruleContext
            .getPackageDirectory()
            .getRelative(labelName.replaceName("lib" + labelName.getBaseName()))
            .getPathString();
    ObjcVariablesExtension extension =
        new ObjcVariablesExtension.Builder()
            .setRuleContext(ruleContext)
            .setObjcProvider(objcProvider)
            .setConfiguration(buildConfiguration)
            .setIntermediateArtifacts(intermediateArtifacts)
            .setFrameworkSearchPath(frameworkHeaderSearchPaths(objcProvider))
            .setFullyLinkArchive(outputArchive)
            .addVariableCategory(VariableCategory.FULLY_LINK_VARIABLES)
            .build();
    CppLinkAction fullyLinkAction =
        new CppLinkActionBuilder(
                ruleContext,
                ruleContext,
                ruleContext.getLabel(),
                outputArchive,
                ruleContext.getConfiguration(),
                ccToolchain,
                fdoContext,
                getFeatureConfiguration(ruleContext, ccToolchain, buildConfiguration, objcProvider),
                createObjcCppSemantics(
                    objcProvider, /* privateHdrs= */ ImmutableList.of(), /* pchHdr= */ null))
            .setGrepIncludes(CppHelper.getGrepIncludes(ruleContext))
            .setIsStampingEnabled(AnalysisUtils.isStampingEnabled(ruleContext))
            .setTestOrTestOnlyTarget(ruleContext.isTestOnlyTarget() || ruleContext.isTestTarget())
            .addActionInputs(objcProvider.getObjcLibraries())
            .addActionInputs(objcProvider.getCcLibraries())
            .addActionInputs(objcProvider.get(IMPORTED_LIBRARY).toSet())
            .setLinkerFiles(ccToolchain.getLinkerFiles())
            .setLinkType(LinkTargetType.OBJC_FULLY_LINKED_ARCHIVE)
            .setLinkingMode(LinkingMode.STATIC)
            .setLibraryIdentifier(libraryIdentifier)
            .addVariablesExtension(extension)
            .build();
    ruleContext.registerAction(fullyLinkAction);
    return this;
  }

  /**
   * Returns all framework names to pass to the linker using {@code -framework} flags. For a
   * framework in the directory foo/bar.framework, the name is "bar". Each framework is found
   * without using the full path by means of the framework search paths. Search paths are added by
   * {@link #commonLinkAndCompileFlagsForClang(ObjcProvider, ObjcConfiguration, AppleConfiguration)})
   *
   * <p>It's awful that we can't pass the full path to the framework and avoid framework search
   * paths, but this is imposed on us by clang. clang does not support passing the full path to the
   * framework, so Bazel cannot do it either.
   */
  private Set<String> frameworkNames(ObjcProvider provider) {
    // LinkedHashSet: dedupe while keeping a deterministic order.
    Set<String> names = new LinkedHashSet<>();
    Iterables.addAll(names, SdkFramework.names(provider.get(SDK_FRAMEWORK)));
    Iterables.addAll(names, provider.staticFrameworkNames());
    Iterables.addAll(names, provider.dynamicFrameworkNames());
    return names;
  }

  /** Returns libraries that should be passed to the linker. */
  private ImmutableList<String> libraryNames(ObjcProvider objcProvider) {
    ImmutableList.Builder<String> args = new ImmutableList.Builder<>();
    for (String dylib : objcProvider.get(SDK_DYLIB)) {
      if (dylib.startsWith("lib")) {
        // remove lib prefix if it exists which is standard
        // for libraries (libxml.dylib -> -lxml).
        dylib = dylib.substring(3);
      }
      args.add(dylib);
    }
    return args.build();
  }

  /** Returns libraries that should be passed into the linker with {@code -force_load}. */
  private ImmutableSet<Artifact> getForceLoadArtifacts(ObjcProvider objcProvider) {
    List<Artifact> ccLibraries = objcProvider.getCcLibraries();
    Iterable<Artifact> ccLibrariesToForceLoad =
        Iterables.filter(ccLibraries, ALWAYS_LINKED_CC_LIBRARY);

    return ImmutableSet.<Artifact>builder()
        .addAll(objcProvider.get(FORCE_LOAD_LIBRARY))
        .addAll(ccLibrariesToForceLoad)
        .build();
  }

  /** Returns pruned J2Objc archives for this target. */
  private ImmutableList<Artifact> j2objcPrunedLibraries(ObjcProvider objcProvider) {
    ImmutableList.Builder<Artifact> j2objcPrunedLibraryBuilder = ImmutableList.builder();
    for (Artifact j2objcLibrary : objcProvider.get(ObjcProvider.J2OBJC_LIBRARY)) {
      j2objcPrunedLibraryBuilder.add(intermediateArtifacts.j2objcPrunedArchive(j2objcLibrary));
    }
    return j2objcPrunedLibraryBuilder.build();
  }

  /** Returns true if this build should strip J2Objc dead code. */
  private boolean stripJ2ObjcDeadCode(J2ObjcEntryClassProvider j2ObjcEntryClassProvider) {
    J2ObjcConfiguration j2objcConfiguration =
        buildConfiguration.getFragment(J2ObjcConfiguration.class);
    // Only perform J2ObjC dead code stripping if flag --j2objc_dead_code_removal is specified and
    // users have specified entry classes.
    return j2objcConfiguration.removeDeadCode()
        && !j2ObjcEntryClassProvider.getEntryClasses().isEmpty();
  }

  /** Registers actions to perform J2Objc dead code removal. */
  private void registerJ2ObjcDeadCodeRemovalActions(
      ObjcProvider objcProvider,
      J2ObjcMappingFileProvider j2ObjcMappingFileProvider,
      J2ObjcEntryClassProvider j2ObjcEntryClassProvider) {
    NestedSet<String> entryClasses = j2ObjcEntryClassProvider.getEntryClasses();
    Artifact pruner = ruleContext.getPrerequisiteArtifact("$j2objc_dead_code_pruner", Mode.HOST);
    NestedSet<Artifact> j2ObjcDependencyMappingFiles =
        j2ObjcMappingFileProvider.getDependencyMappingFiles();
    NestedSet<Artifact> j2ObjcHeaderMappingFiles =
        j2ObjcMappingFileProvider.getHeaderMappingFiles();
    NestedSet<Artifact> j2ObjcArchiveSourceMappingFiles =
        j2ObjcMappingFileProvider.getArchiveSourceMappingFiles();

    // One pruner invocation per J2ObjC archive; each replaces the archive with a pruned copy
    // (or the dummy archive if everything is dead).
    for (Artifact j2objcArchive : objcProvider.get(ObjcProvider.J2OBJC_LIBRARY)) {
      Artifact prunedJ2ObjcArchive = intermediateArtifacts.j2objcPrunedArchive(j2objcArchive);
      Artifact dummyArchive =
          Iterables.getOnlyElement(
              ruleContext
                  .getPrerequisite("$dummy_lib", Mode.TARGET, ObjcProvider.SKYLARK_CONSTRUCTOR)
                  .get(LIBRARY));

      CustomCommandLine commandLine =
          CustomCommandLine.builder()
              .addExecPath("--input_archive", j2objcArchive)
              .addExecPath("--output_archive", prunedJ2ObjcArchive)
              .addExecPath("--dummy_archive", dummyArchive)
              .addExecPath("--xcrunwrapper", xcrunwrapper(ruleContext).getExecutable())
              .addExecPaths(
                  "--dependency_mapping_files",
                  VectorArg.join(",").each(j2ObjcDependencyMappingFiles))
              .addExecPaths(
                  "--header_mapping_files", VectorArg.join(",").each(j2ObjcHeaderMappingFiles))
              .addExecPaths(
                  "--archive_source_mapping_files",
                  VectorArg.join(",").each(j2ObjcArchiveSourceMappingFiles))
              .add("--entry_classes")
              .addAll(VectorArg.join(",").each(entryClasses))
              .build();

      ruleContext.registerAction(
          ObjcRuleClasses.spawnAppleEnvActionBuilder(
                  XcodeConfigProvider.fromRuleContext(ruleContext),
                  appleConfiguration.getSingleArchPlatform())
              .setMnemonic("DummyPruner")
              .setExecutable(pruner)
              .addInput(dummyArchive)
              .addInput(pruner)
              .addInput(j2objcArchive)
              .addInput(xcrunwrapper(ruleContext).getExecutable())
              .addTransitiveInputs(j2ObjcDependencyMappingFiles)
              .addTransitiveInputs(j2ObjcHeaderMappingFiles)
              .addTransitiveInputs(j2ObjcArchiveSourceMappingFiles)
              .addCommandLine(
                  commandLine,
                  ParamFileInfo.builder(ParameterFile.ParameterFileType.UNQUOTED)
                      .setCharset(ISO_8859_1)
                      .setUseAlways(true)
                      .build())
              .addOutput(prunedJ2ObjcArchive)
              .build(ruleContext));
    }
  }

  /** Returns archives arising from j2objc transpilation after dead code removal. */
  private Iterable<Artifact> computeAndStripPrunedJ2ObjcArchives(
      J2ObjcEntryClassProvider j2ObjcEntryClassProvider,
      J2ObjcMappingFileProvider j2ObjcMappingFileProvider,
      ObjcProvider objcProvider) {
    Iterable<Artifact> prunedJ2ObjcArchives = ImmutableList.<Artifact>of();
    if (stripJ2ObjcDeadCode(j2ObjcEntryClassProvider)) {
      registerJ2ObjcDeadCodeRemovalActions(
          objcProvider, j2ObjcMappingFileProvider, j2ObjcEntryClassProvider);
      prunedJ2ObjcArchives = j2objcPrunedLibraries(objcProvider);
    }
    return prunedJ2ObjcArchives;
  }

  /**
   * Returns a nested set of Bazel-built ObjC libraries with all unpruned J2ObjC libraries
   * substituted with pruned ones.
   */
  private ImmutableList<Artifact> substituteJ2ObjcPrunedLibraries(ObjcProvider objcProvider) {
    ImmutableList.Builder<Artifact> libraries = new ImmutableList.Builder<>();

    Set<Artifact> unprunedJ2ObjcLibs = objcProvider.get(ObjcProvider.J2OBJC_LIBRARY).toSet();
    for (Artifact library : objcProvider.getObjcLibraries()) {
      // If we match an unpruned J2ObjC library, add the pruned version of the J2ObjC static library
      // instead.
      if (unprunedJ2ObjcLibs.contains(library)) {
        libraries.add(intermediateArtifacts.j2objcPrunedArchive(library));
      } else {
        libraries.add(library);
      }
    }
    return libraries.build();
  }

  /** Returns the artifact that should be the outcome of this build's link action */
  private Artifact getBinaryToLink() {

    // When compilation_mode=opt and objc_enable_binary_stripping are specified, the unstripped
    // binary containing debug symbols is generated by the linker, which also needs the debug
    // symbols for dead-code removal. The binary is also used to generate dSYM bundle if
    // --apple_generate_dsym is specified. A symbol strip action is later registered to strip
    // the symbol table from the unstripped binary.
    return objcConfiguration.shouldStripBinary()
        ? intermediateArtifacts.unstrippedSingleArchitectureBinary()
        : intermediateArtifacts.strippedSingleArchitectureBinary();
  }

  /**
   * Builds the {@code strip} command line: {@code strip [extraFlags] -o <stripped> <unstripped>}.
   */
  private static CommandLine symbolStripCommandLine(
      ImmutableList<String> extraFlags, Artifact unstrippedArtifact, Artifact strippedArtifact) {
    return CustomCommandLine.builder()
        .add(STRIP)
        .addAll(extraFlags)
        .addExecPath("-o", strippedArtifact)
        .addPath(unstrippedArtifact.getExecPath())
        .build();
  }

  /** Signals if stripping should include options for dynamic libraries. */
  private enum StrippingType {
    DEFAULT,
    DYNAMIC_LIB,
    KERNEL_EXTENSION
  }

  /**
   * Registers an action that uses the 'strip' tool to perform binary stripping on the given binary
   * subject to the given {@link StrippingType}.
   */
  private void registerBinaryStripAction(Artifact binaryToLink, StrippingType strippingType) {
    final ImmutableList<String> stripArgs;
    if (isTestRule) {
      // For test targets, only debug symbols are stripped off, since /usr/bin/strip is not able
      // to strip off all symbols in XCTest bundle.
      stripArgs = ImmutableList.of("-S");
    } else {
      switch (strippingType) {
        case DYNAMIC_LIB:
        case KERNEL_EXTENSION:
          // For dylibs and kexts, must strip only local symbols.
          stripArgs = ImmutableList.of("-x");
          break;
        case DEFAULT:
          stripArgs = ImmutableList.<String>of();
          break;
        default:
          throw new IllegalArgumentException("Unsupported stripping type " + strippingType);
      }
    }

    Artifact strippedBinary = intermediateArtifacts.strippedSingleArchitectureBinary();

    ruleContext.registerAction(
        ObjcRuleClasses.spawnAppleEnvActionBuilder(
                XcodeConfigProvider.fromRuleContext(ruleContext),
                appleConfiguration.getSingleArchPlatform())
            .setMnemonic("ObjcBinarySymbolStrip")
            .setExecutable(xcrunwrapper(ruleContext))
            .addCommandLine(symbolStripCommandLine(stripArgs, binaryToLink, strippedBinary))
            .addOutput(strippedBinary)
            .addInput(binaryToLink)
            .build(ruleContext));
  }

  /**
   * Registers an action generating an umbrella header that includes the given public headers,
   * for use with the module map.
   */
  private CompilationSupport registerGenerateUmbrellaHeaderAction(
      Artifact umbrellaHeader, Iterable<Artifact> publicHeaders) {
    ruleContext.registerAction(
        new UmbrellaHeaderAction(
            ruleContext.getActionOwner(),
            umbrellaHeader,
            publicHeaders,
            ImmutableList.<PathFragment>of()));

    return this;
  }

  /**
   * Returns the precompiled-header artifact from the rule's "pch" attribute, or absent when pch
   * usage is disabled or no pch is set.
   */
  private Optional<Artifact> getPchFile() {
    if (!usePch) {
      return Optional.absent();
    }
    Artifact pchHdr = null;
    if (ruleContext.attributes().has("pch", BuildType.LABEL)) {
      pchHdr = ruleContext.getPrerequisiteArtifact("pch", Mode.TARGET);
    }
    return Optional.fromNullable(pchHdr);
  }

  /**
   * Registers an action that will generate a clang module map for this target, using the hdrs
   * attribute of this rule.
   */
  CompilationSupport registerGenerateModuleMapAction(CompilationArtifacts compilationArtifacts) {
    // TODO(bazel-team): Include textual headers in the module map when Xcode 6 support is
    // dropped.
    // TODO(b/32225593): Include private headers in the module map.
    Iterable<Artifact> publicHeaders = attributes.hdrs();
    publicHeaders = Iterables.concat(publicHeaders, compilationArtifacts.getAdditionalHdrs());
    CppModuleMap moduleMap = intermediateArtifacts.moduleMap();
    registerGenerateModuleMapAction(moduleMap, publicHeaders);

    Optional<Artifact> umbrellaHeader = moduleMap.getUmbrellaHeader();
    if (umbrellaHeader.isPresent()) {
      registerGenerateUmbrellaHeaderAction(umbrellaHeader.get(), publicHeaders);
    }

    return this;
  }

  /**
   * Registers an action that will generate a clang module map.
   *
   * @param moduleMap the module map to generate
   * @param publicHeaders the headers that should be directly accessible by dependers
   * @return this compilation support
   */
  public CompilationSupport registerGenerateModuleMapAction(
      CppModuleMap moduleMap, Iterable<Artifact> publicHeaders) {
    // Only headers with module-map-compatible extensions are listed in the module map.
    publicHeaders = Iterables.filter(publicHeaders, CppFileTypes.MODULE_MAP_HEADER);
    ruleContext.registerAction(
        new CppModuleMapAction(
            ruleContext.getActionOwner(),
            moduleMap,
            ImmutableList.<Artifact>of(),
            publicHeaders,
            attributes.moduleMapsForDirectDeps(),
            ImmutableList.<PathFragment>of(),
            /*compiledModule=*/ true,
            /*moduleMapHomeIsCwd=*/ false,
            /* generateSubmodules= */ false,
            /*externDependencies=*/ true));

    return this;
  }

  /**
   * Collector that, given a list of output artifacts, finds and registers coverage notes metadata
   * for any compilation action.
   */
  private static class ObjcCoverageMetadataCollector extends LocalMetadataCollector {

    @Override
    public void collectMetadataArtifacts(
        Iterable<Artifact> artifacts,
        AnalysisEnvironment analysisEnvironment,
        NestedSetBuilder<Artifact> metadataFilesBuilder) {
      for (Artifact artifact : artifacts) {
        ActionAnalysisMetadata action = analysisEnvironment.getLocalGeneratingAction(artifact);
        if (action.getMnemonic().equals("ObjcCompile")) {
          addOutputs(metadataFilesBuilder, action, ObjcRuleClasses.COVERAGE_NOTES);
        }
      }
    }
  }

  /** Returns the deduplicated set of parent directories of the given paths. */
  private static Iterable<PathFragment> uniqueParentDirectories(Iterable<PathFragment> paths) {
    ImmutableSet.Builder<PathFragment> parents = new ImmutableSet.Builder<>();
    for (PathFragment path : paths) {
      parents.add(path.getParentDirectory());
    }
    return parents.build();
  }

  /** Holds information about Objective-C compile actions that require header thinning. */
  private static final class ObjcHeaderThinningInfo {
    /** Source file for compile action. */
    public final Artifact sourceFile;
    /** headers_list file for compile action. */
    public final Artifact headersListFile;
    /** Command line arguments for compile action execution. */
    public final ImmutableList<String> arguments;

    public ObjcHeaderThinningInfo(
        Artifact sourceFile, Artifact headersListFile, ImmutableList<String> arguments) {
      this.sourceFile = Preconditions.checkNotNull(sourceFile);
      this.headersListFile = Preconditions.checkNotNull(headersListFile);
      this.arguments = Preconditions.checkNotNull(arguments);
    }

    public ObjcHeaderThinningInfo(
        Artifact sourceFile, Artifact headersListFile, Iterable<String> arguments) {
      this(sourceFile, headersListFile, ImmutableList.copyOf(arguments));
    }
  }

  /**
   * Returns true when ObjC header thinning is enabled via configuration and a valid
   * header_scanner executable target is provided.
   */
  private boolean isHeaderThinningEnabled() {
    if (objcConfiguration.useExperimentalHeaderThinning()
        && ruleContext.isAttrDefined(ObjcRuleClasses.HEADER_SCANNER_ATTRIBUTE, BuildType.LABEL)) {
      FilesToRunProvider tool = getHeaderThinningToolExecutable();
      // Additionally require that an executable artifact exists, so that an empty filegroup
      // supplied as the tool disables header thinning rather than failing later.
      return tool != null && tool.getExecutable() != null;
    }
    return false;
  }

  /** Returns the header-scanner tool from the rule's header_scanner attribute (host config). */
  private FilesToRunProvider getHeaderThinningToolExecutable() {
    return ruleContext
        .getPrerequisite(ObjcRuleClasses.HEADER_SCANNER_ATTRIBUTE, Mode.HOST)
        .getProvider(FilesToRunProvider.class);
  }

  /**
   * Collects per-object-file compile information and registers header scanning actions for the
   * eligible (non-tree, thinnable-extension) sources; no-op unless header thinning is the active
   * include processing type.
   */
  private void registerHeaderScanningActions(
      CcCompilationOutputs ccCompilationOutputs,
      ObjcProvider objcProvider,
      CompilationArtifacts compilationArtifacts)
      throws RuleErrorException {
    // PIC is not used for Obj-C builds, if that changes this method will need to change
    if (includeProcessingType != IncludeProcessingType.HEADER_THINNING
        || ccCompilationOutputs.getObjectFiles(false).isEmpty()) {
      return;
    }
    try {
      ImmutableList.Builder<ObjcHeaderThinningInfo> headerThinningInfos = ImmutableList.builder();
      AnalysisEnvironment analysisEnvironment = ruleContext.getAnalysisEnvironment();
      for (Artifact objectFile : ccCompilationOutputs.getObjectFiles(false)) {
        ActionAnalysisMetadata generatingAction =
            analysisEnvironment.getLocalGeneratingAction(objectFile);
        if (generatingAction instanceof CppCompileAction) {
          CppCompileAction action = (CppCompileAction) generatingAction;
          Artifact sourceFile = action.getSourceFile();
          if (!sourceFile.isTreeArtifact()
              && SOURCES_FOR_HEADER_THINNING.matches(sourceFile.getFilename())) {
            headerThinningInfos.add(
                new ObjcHeaderThinningInfo(
                    sourceFile,
                    intermediateArtifacts.headersListFile(objectFile),
                    action.getCompilerOptions()));
          }
        }
      }
      registerHeaderScanningActions(
          headerThinningInfos.build(), objcProvider, compilationArtifacts);
    } catch (CommandLineExpansionException e) {
      // NOTE(review): only the message is propagated; the original cause `e` (and its stack
      // trace) is dropped here — consider preserving the cause if the API allows it.
      throw ruleContext.throwWithRuleError(e.getMessage());
    }
  }

  /**
   * Creates and registers ObjcHeaderScanning {@link SpawnAction}. Groups all the actions by their
   * compilation command line arguments and creates a ObjcHeaderScanning action for each unique one.
   *
   * <p>The number of sources to scan per actions are bounded so that targets with a high number of
   * sources are not penalized. A large number of sources may require a lot of processing
   * particularly when the headers required for different sources vary greatly and the caching
   * mechanism in the tool is largely useless. In these instances these actions would benefit by
   * being distributed so they don't contribute to the critical path. The partition size is
   * configurable so that it can be tuned.
   */
  private void registerHeaderScanningActions(
      ImmutableList<ObjcHeaderThinningInfo> headerThinningInfo,
      ObjcProvider objcProvider,
      CompilationArtifacts compilationArtifacts) {
    if (headerThinningInfo.isEmpty()) {
      return;
    }

    ListMultimap<ImmutableList<String>, ObjcHeaderThinningInfo>
        objcHeaderThinningInfoByCommandLine = groupActionsByCommandLine(headerThinningInfo);
    // Register a header scanning spawn action for each unique set of command line arguments
    for (ImmutableList<String> args : objcHeaderThinningInfoByCommandLine.keySet()) {
      // As infos is in insertion order we should reliably get the same sublists below
      for (List<ObjcHeaderThinningInfo> partition :
          Lists.partition(
              objcHeaderThinningInfoByCommandLine.get(args),
              objcConfiguration.objcHeaderThinningPartitionSize())) {
        registerHeaderScanningAction(objcProvider, compilationArtifacts, args, partition);
      }
    }
  }

  /**
   * Registers one ObjcHeaderScanning spawn action covering the given partition of sources, all of
   * which share the same filtered compiler command line {@code args}.
   */
  private void registerHeaderScanningAction(
      ObjcProvider objcProvider,
      CompilationArtifacts compilationArtifacts,
      ImmutableList<String> args,
      List<ObjcHeaderThinningInfo> infos) {
    SpawnAction.Builder builder =
        new SpawnAction.Builder()
            .setMnemonic("ObjcHeaderScanning")
            .setExecutable(getHeaderThinningToolExecutable())
            .addInputs(
                ruleContext
                    .getPrerequisiteArtifacts(ObjcRuleClasses.APPLE_SDK_ATTRIBUTE, Mode.TARGET)
                    .list());
    CustomCommandLine.Builder cmdLine =
        CustomCommandLine.builder()
            .add("--arch", appleConfiguration.getSingleArchitecture().toLowerCase())
            .add("--platform", appleConfiguration.getSingleArchPlatform().getLowerCaseNameInPlist())
            .add(
                "--sdk_version",
                XcodeConfig.getXcodeConfigProvider(ruleContext)
                    .getSdkVersionForPlatform(appleConfiguration.getSingleArchPlatform())
                    .toStringWithMinimumComponents(2))
            .add(
                "--xcode_version",
                XcodeConfig.getXcodeConfigProvider(ruleContext)
                    .getXcodeVersion()
                    .toStringWithMinimumComponents(2))
            .add("--");
    // Each source is passed as "<source>:<headers_list output>".
    for (ObjcHeaderThinningInfo info : infos) {
      cmdLine.addFormatted(
          "%s:%s", info.sourceFile.getExecPath(), info.headersListFile.getExecPath());
      builder.addInput(info.sourceFile).addOutput(info.headersListFile);
    }
    ruleContext.registerAction(
        builder
            .addCommandLine(cmdLine.add("--").addAll(args).build())
            .addInputs(compilationArtifacts.getPrivateHdrs())
            .addTransitiveInputs(attributes.hdrs())
            .addTransitiveInputs(objcProvider.get(ObjcProvider.HEADER))
            .addInputs(getPchFile().asSet())
            .build(ruleContext));
  }

  /**
   * Groups {@link ObjcHeaderThinningInfo} objects based on the command line arguments of the
   * ObjcCompile action.
   *
   * <p>Grouping by command line arguments allows {@link
   * #registerHeaderScanningActions(ImmutableList, ObjcProvider, CompilationArtifacts)} to create a
   * {@link SpawnAction} based on the compiler command line flags that may cause a difference in
   * behaviour by the preprocessor. Some of the command line arguments must be filtered out as they
   * change with every source {@link Artifact}; for example the object file (-o) and dotd filenames
   * (-MF). These arguments are known not to change the preprocessor behaviour.
   *
   * @param headerThinningInfos information for compile actions that require header thinning
   * @return values in {@code headerThinningInfos} grouped by compile action command line arguments
   */
  private static ListMultimap<ImmutableList<String>, ObjcHeaderThinningInfo>
      groupActionsByCommandLine(ImmutableList<ObjcHeaderThinningInfo> headerThinningInfos) {
    // Maintain insertion order so that iteration in #registerHeaderScanningActions is deterministic
    ListMultimap<ImmutableList<String>, ObjcHeaderThinningInfo>
        objcHeaderThinningInfoByCommandLine = ArrayListMultimap.create();
    for (ObjcHeaderThinningInfo info : headerThinningInfos) {
      ImmutableList.Builder<String> filteredArgumentsBuilder = ImmutableList.builder();
      List<String> arguments = info.arguments;
      for (int i = 0; i < arguments.size(); ++i) {
        String arg = arguments.get(i);
        // -MF/-o/-c take a per-source operand: skip the flag and its operand. -MD is a
        // per-source flag with no operand: skip just the flag.
        if (arg.equals("-MF") || arg.equals("-o") || arg.equals("-c")) {
          ++i;
        } else if (!arg.equals("-MD")) {
          filteredArgumentsBuilder.add(arg);
        }
      }
      objcHeaderThinningInfoByCommandLine.put(filteredArgumentsBuilder.build(), info);
    }
    return objcHeaderThinningInfoByCommandLine;
  }

  /**
   * Returns the custom module map artifact from the rule's "module_map" attribute, or absent when
   * the attribute is undefined or unset.
   */
  public static Optional<Artifact> getCustomModuleMap(RuleContext ruleContext) {
    if (ruleContext.attributes().has("module_map", BuildType.LABEL)) {
      return Optional.fromNullable(ruleContext.getPrerequisiteArtifact("module_map", Mode.TARGET));
    }
    return Optional.absent();
  }
}
src/main/java/com/google/devtools/build/lib/rules/objc/CompilationSupport.java
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.objc; import static com.google.common.collect.ImmutableSortedSet.toImmutableSortedSet; import static com.google.devtools.build.lib.packages.ImplicitOutputsFunction.fromTemplates; import static com.google.devtools.build.lib.rules.cpp.Link.LINK_LIBRARY_FILETYPES; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.DEFINE; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.DYNAMIC_FRAMEWORK_FILE; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.FORCE_LOAD_LIBRARY; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.FRAMEWORK_SEARCH_PATHS; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.HEADER; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.IMPORTED_LIBRARY; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.INCLUDE; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.INCLUDE_SYSTEM; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.LIBRARY; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.LINK_INPUTS; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.SDK_DYLIB; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.SDK_FRAMEWORK; import static com.google.devtools.build.lib.rules.objc.ObjcProvider.STATIC_FRAMEWORK_FILE; 
import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.COMPILABLE_SRCS_TYPE; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.HEADERS; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.NON_ARC_SRCS_TYPE; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.PRECOMPILED_SRCS_TYPE; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.SRCS_TYPE; import static com.google.devtools.build.lib.rules.objc.ObjcRuleClasses.STRIP; import static java.nio.charset.StandardCharsets.ISO_8859_1; import static java.util.Comparator.naturalOrder; import static java.util.stream.Collectors.toCollection; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.collect.Streams; import com.google.devtools.build.lib.actions.ActionAnalysisMetadata; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.CommandLine; import com.google.devtools.build.lib.actions.CommandLineExpansionException; import com.google.devtools.build.lib.actions.ParamFileInfo; import com.google.devtools.build.lib.actions.ParameterFile; import com.google.devtools.build.lib.analysis.AnalysisEnvironment; import com.google.devtools.build.lib.analysis.AnalysisUtils; import com.google.devtools.build.lib.analysis.FilesToRunProvider; import 
com.google.devtools.build.lib.analysis.PrerequisiteArtifacts; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.VectorArg; import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction; import com.google.devtools.build.lib.analysis.actions.SpawnAction; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget.Mode; import com.google.devtools.build.lib.analysis.test.InstrumentedFilesCollector; import com.google.devtools.build.lib.analysis.test.InstrumentedFilesCollector.InstrumentationSpec; import com.google.devtools.build.lib.analysis.test.InstrumentedFilesCollector.LocalMetadataCollector; import com.google.devtools.build.lib.analysis.test.InstrumentedFilesInfo; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.packages.BuildType; import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SafeImplicitOutputsFunction; import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException; import com.google.devtools.build.lib.packages.TargetUtils; import com.google.devtools.build.lib.rules.apple.AppleCommandLineOptions.AppleBitcodeMode; import com.google.devtools.build.lib.rules.apple.AppleConfiguration; import com.google.devtools.build.lib.rules.apple.XcodeConfig; import com.google.devtools.build.lib.rules.apple.XcodeConfigProvider; import com.google.devtools.build.lib.rules.cpp.CcCommon; import com.google.devtools.build.lib.rules.cpp.CcCompilationContext; import 
com.google.devtools.build.lib.rules.cpp.CcCompilationHelper; import com.google.devtools.build.lib.rules.cpp.CcCompilationHelper.CompilationInfo; import com.google.devtools.build.lib.rules.cpp.CcCompilationOutputs; import com.google.devtools.build.lib.rules.cpp.CcInfo; import com.google.devtools.build.lib.rules.cpp.CcLinkingHelper; import com.google.devtools.build.lib.rules.cpp.CcToolchain; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.CollidingProvidesException; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.FeatureConfiguration; import com.google.devtools.build.lib.rules.cpp.CcToolchainProvider; import com.google.devtools.build.lib.rules.cpp.CcToolchainVariables.VariablesExtension; import com.google.devtools.build.lib.rules.cpp.CppCompileAction; import com.google.devtools.build.lib.rules.cpp.CppConfiguration; import com.google.devtools.build.lib.rules.cpp.CppFileTypes; import com.google.devtools.build.lib.rules.cpp.CppHelper; import com.google.devtools.build.lib.rules.cpp.CppLinkAction; import com.google.devtools.build.lib.rules.cpp.CppLinkActionBuilder; import com.google.devtools.build.lib.rules.cpp.CppModuleMap; import com.google.devtools.build.lib.rules.cpp.CppModuleMapAction; import com.google.devtools.build.lib.rules.cpp.CppRuleClasses; import com.google.devtools.build.lib.rules.cpp.FdoContext; import com.google.devtools.build.lib.rules.cpp.IncludeProcessing; import com.google.devtools.build.lib.rules.cpp.IncludeScanning; import com.google.devtools.build.lib.rules.cpp.Link.LinkTargetType; import com.google.devtools.build.lib.rules.cpp.Link.LinkingMode; import com.google.devtools.build.lib.rules.cpp.NoProcessing; import com.google.devtools.build.lib.rules.cpp.PrecompiledFiles; import com.google.devtools.build.lib.rules.cpp.UmbrellaHeaderAction; import com.google.devtools.build.lib.rules.objc.ObjcProvider.Flag; import com.google.devtools.build.lib.rules.objc.ObjcVariablesExtension.VariableCategory; import 
com.google.devtools.build.lib.util.FileTypeSet;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.stream.Stream;
import javax.annotation.Nullable;

/**
 * Support for rules that compile sources. Provides ways to determine files that should be output,
 * registering Xcode settings and generating the various actions that might be needed for
 * compilation.
 *
 * <p>A subclass should express a particular strategy for compile and link action registration.
 * Subclasses should implement the API without adding new visible methods - rule implementations
 * should be able to use a {@link CompilationSupport} instance to compile and link source without
 * knowing the subclass being used.
 *
 * <p>Methods on this class can be called in any order without impacting the result.
 */
public class CompilationSupport {

  // Name of the directory used as the Clang module cache.
  @VisibleForTesting static final String OBJC_MODULE_CACHE_DIR_NAME = "_objc_module_cache";

  @VisibleForTesting
  static final String MODULES_CACHE_PATH_WARNING =
      "setting '-fmodules-cache-path' manually in copts is unsupported";

  @VisibleForTesting
  static final String ABSOLUTE_INCLUDES_PATH_FORMAT =
      "The path '%s' is absolute, but only relative paths are allowed.";

  // Flags for clang 6.1(xcode 6.4)
  @VisibleForTesting
  static final ImmutableList<String> CLANG_GCOV_COVERAGE_FLAGS =
      ImmutableList.of("-fprofile-arcs", "-ftest-coverage");

  @VisibleForTesting
  static final ImmutableList<String> CLANG_LLVM_COVERAGE_FLAGS =
      ImmutableList.of("-fprofile-instr-generate", "-fcoverage-mapping");

  // These are added by Xcode when building, because the simulator is built on OSX
  // frameworks so we aim compile to match the OSX objc runtime.
  @VisibleForTesting
  static final ImmutableList<String> SIMULATOR_COMPILE_FLAGS =
      ImmutableList.of(
          "-fexceptions", "-fasm-blocks", "-fobjc-abi-version=2", "-fobjc-legacy-dispatch");

  /**
   * Frameworks implicitly linked to iOS, watchOS, and tvOS binaries when using legacy compilation.
   */
  @VisibleForTesting
  static final ImmutableList<SdkFramework> AUTOMATIC_SDK_FRAMEWORKS =
      ImmutableList.of(new SdkFramework("Foundation"), new SdkFramework("UIKit"));

  /** Selects cc libraries that have alwayslink=1. */
  private static final Predicate<Artifact> ALWAYS_LINKED_CC_LIBRARY =
      input -> LINK_LIBRARY_FILETYPES.matches(input.getFilename());

  // Crosstool feature names toggled on/off in getFeatureConfiguration.
  private static final String OBJC_MODULE_FEATURE_NAME = "use_objc_modules";
  private static final String NO_ENABLE_MODULES_FEATURE_NAME = "no_enable_modules";
  private static final String DEAD_STRIP_FEATURE_NAME = "dead_strip";

  /**
   * Enabled if this target's rule is not a test rule. Binary stripping should not be applied in the
   * link step. TODO(b/36562173): Replace this behavior with a condition on bundle creation.
   *
   * <p>Note that the crosstool does not support feature negation in FlagSet.with_feature, which is
   * the mechanism used to condition linker arguments here. Therefore, we expose
   * "is_not_test_target" instead of the more intuitive "is_test_target".
   */
  private static final String IS_NOT_TEST_TARGET_FEATURE_NAME = "is_not_test_target";

  /** Enabled if this target generates debug symbols in a dSYM file. */
  private static final String GENERATE_DSYM_FILE_FEATURE_NAME = "generate_dsym_file";

  /**
   * Enabled if this target does not generate debug symbols.
   *
   * <p>Note that the crosstool does not support feature negation in FlagSet.with_feature, which is
   * the mechanism used to condition linker arguments here. Therefore, we expose
   * "no_generate_debug_symbols" in addition to "generate_dsym_file"
   */
  private static final String NO_GENERATE_DEBUG_SYMBOLS_FEATURE_NAME = "no_generate_debug_symbols";

  private static final String GENERATE_LINKMAP_FEATURE_NAME = "generate_linkmap";

  // Prefix for the per-Xcode-version feature added in getFeatureConfiguration.
  private static final String XCODE_VERSION_FEATURE_NAME_PREFIX = "xcode_";

  /** Enabled if this target has objc sources in its transitive closure. */
  private static final String CONTAINS_OBJC = "contains_objc_sources";

  // Action names for which feature-configured flags are requested.
  private static final ImmutableList<String> ACTIVATED_ACTIONS =
      ImmutableList.of(
          "objc-compile",
          "objc++-compile",
          "objc-archive",
          "objc-fully-link",
          "objc-executable",
          "objc++-executable",
          "assemble",
          "preprocess-assemble",
          "c-compile",
          "c++-compile");

  /** The kind of include processing to use. */
  enum IncludeProcessingType {
    HEADER_THINNING,
    INCLUDE_SCANNING,
    NO_PROCESSING;
  }

  /** Returns the location of the xcrunwrapper tool. */
  public static final FilesToRunProvider xcrunwrapper(RuleContext ruleContext) {
    return ruleContext.getExecutablePrerequisite("$xcrunwrapper", Mode.HOST);
  }

  /** Returns the location of the libtool tool. */
  public static final FilesToRunProvider libtool(RuleContext ruleContext) {
    return ruleContext.getExecutablePrerequisite(ObjcRuleClasses.LIBTOOL_ATTRIBUTE, Mode.HOST);
  }

  /**
   * Files which can be instrumented along with the attributes in which they may occur and the
   * attributes along which they are propagated from dependencies (via {@link
   * InstrumentedFilesInfo}).
   */
  private static final InstrumentationSpec INSTRUMENTATION_SPEC =
      new InstrumentationSpec(
              FileTypeSet.of(ObjcRuleClasses.NON_CPP_SOURCES, ObjcRuleClasses.CPP_SOURCES, HEADERS))
          .withSourceAttributes("srcs", "non_arc_srcs", "hdrs")
          .withDependencyAttributes("deps", "data", "binary", "xctest_app");

  /** Defines a library that contains the transitive closure of dependencies.
*/ public static final SafeImplicitOutputsFunction FULLY_LINKED_LIB = fromTemplates("%{name}_fully_linked.a"); /** * Returns additional inputs to include processing, outside of the headers provided by * ObjProvider. */ private Iterable<Artifact> getExtraIncludeProcessingInputs( Collection<Artifact> privateHdrs, Artifact pchHdr) { Iterable<Artifact> extraInputs = privateHdrs; if (pchHdr != null) { extraInputs = Iterables.concat(extraInputs, ImmutableList.of(pchHdr)); } return extraInputs; } /** * Create and return the include processing to be used. Only HeaderThinning uses potentialInputs. */ private IncludeProcessing createIncludeProcessing(Iterable<Artifact> potentialInputs) { switch (includeProcessingType) { case HEADER_THINNING: return new HeaderThinning(potentialInputs); case INCLUDE_SCANNING: return IncludeScanning.INSTANCE; default: return NoProcessing.INSTANCE; } } private CompilationInfo compile( ObjcProvider objcProvider, VariablesExtension extension, ExtraCompileArgs extraCompileArgs, CcToolchainProvider ccToolchain, FdoContext fdoContext, Iterable<PathFragment> priorityHeaders, Collection<Artifact> sources, Collection<Artifact> privateHdrs, Collection<Artifact> publicHdrs, Collection<Artifact> dependentGeneratedHdrs, Artifact pchHdr, // TODO(b/70777494): Find out how deps get used and remove if not needed. Iterable<? 
extends TransitiveInfoCollection> deps, ObjcCppSemantics semantics, String purpose, boolean generateModuleMap) throws RuleErrorException, InterruptedException { CcCompilationHelper result = new CcCompilationHelper( ruleContext, ruleContext, ruleContext.getLabel(), CppHelper.getGrepIncludes(ruleContext), semantics, getFeatureConfiguration(ruleContext, ccToolchain, buildConfiguration, objcProvider), CcCompilationHelper.SourceCategory.CC_AND_OBJC, ccToolchain, fdoContext, buildConfiguration, TargetUtils.getExecutionInfo( ruleContext.getRule(), ruleContext.isAllowTagsPropagation())) .addSources(sources) .addPrivateHeaders(privateHdrs) .addDefines(objcProvider.get(DEFINE)) .addPublicHeaders(publicHdrs) .addPrivateHeadersUnchecked(dependentGeneratedHdrs) .addCcCompilationContexts( Streams.stream(AnalysisUtils.getProviders(deps, CcInfo.PROVIDER)) .map(CcInfo::getCcCompilationContext) .collect(ImmutableList.toImmutableList())) .setCopts( ImmutableList.<String>builder() .addAll(getCompileRuleCopts()) .addAll( ruleContext .getFragment(ObjcConfiguration.class) .getCoptsForCompilationMode()) .addAll(extraCompileArgs) .build()) .addFrameworkIncludeDirs( frameworkHeaderSearchPathFragments(objcProvider, ruleContext, buildConfiguration)) .addIncludeDirs(priorityHeaders) .addIncludeDirs(objcProvider.get(INCLUDE)) .addSystemIncludeDirs(objcProvider.get(INCLUDE_SYSTEM)) .setCppModuleMap(intermediateArtifacts.moduleMap()) .setPropagateModuleMapToCompileAction(false) .addVariableExtension(extension) .setPurpose(purpose) .addQuoteIncludeDirs( ObjcCommon.userHeaderSearchPaths(objcProvider, ruleContext.getConfiguration())) .setCodeCoverageEnabled(CcCompilationHelper.isCodeCoverageEnabled(ruleContext)) .setHeadersCheckingMode(semantics.determineHeadersCheckingMode(ruleContext)); if (pchHdr != null) { result.addNonModuleMapHeader(pchHdr); } if (getCustomModuleMap(ruleContext).isPresent() || !generateModuleMap) { result.doNotGenerateModuleMap(); } return result.compile(); } private 
Pair<CcCompilationOutputs, ImmutableMap<String, NestedSet<Artifact>>> ccCompileAndLink( ObjcProvider objcProvider, CompilationArtifacts compilationArtifacts, ObjcVariablesExtension.Builder extensionBuilder, ExtraCompileArgs extraCompileArgs, CcToolchainProvider ccToolchain, FdoContext fdoContext, Iterable<PathFragment> priorityHeaders, LinkTargetType linkType, Artifact linkActionInput) throws RuleErrorException, InterruptedException { PrecompiledFiles precompiledFiles = new PrecompiledFiles(ruleContext); Collection<Artifact> arcSources = ImmutableSortedSet.copyOf(compilationArtifacts.getSrcs()); Collection<Artifact> nonArcSources = ImmutableSortedSet.copyOf(compilationArtifacts.getNonArcSrcs()); Collection<Artifact> privateHdrs = ImmutableSortedSet.copyOf(compilationArtifacts.getPrivateHdrs()); Collection<Artifact> publicHdrs = Stream.concat( Streams.stream(attributes.hdrs()), Streams.stream(compilationArtifacts.getAdditionalHdrs())) .collect(toImmutableSortedSet(naturalOrder())); // This is a hack to inject generated headers into the action graph for include scanning. This // is supposed to be done via the compilation prerequisite middleman artifact of dependent // CcCompilationContexts, but ObjcProvider does not propagate that. This issue will go away // when we finish migrating the compile info in ObjcProvider to CcCompilationContext. // // To limit the extra work we're adding, we only add what is required, i.e. the // generated headers. Collection<Artifact> dependentGeneratedHdrs = (includeProcessingType == IncludeProcessingType.INCLUDE_SCANNING) ? objcProvider.getGeneratedHeaderList() : ImmutableList.of(); Artifact pchHdr = getPchFile().orNull(); Iterable<? 
extends TransitiveInfoCollection> deps = ruleContext.getPrerequisites("deps", Mode.TARGET); ObjcCppSemantics semantics = createObjcCppSemantics(objcProvider, privateHdrs, pchHdr); String purpose = String.format("%s_objc_arc", semantics.getPurpose()); extensionBuilder.setArcEnabled(true); CompilationInfo objcArcCompilationInfo = compile( objcProvider, extensionBuilder.build(), extraCompileArgs, ccToolchain, fdoContext, priorityHeaders, arcSources, privateHdrs, publicHdrs, dependentGeneratedHdrs, pchHdr, deps, semantics, purpose, /* generateModuleMap= */ true); purpose = String.format("%s_non_objc_arc", semantics.getPurpose()); extensionBuilder.setArcEnabled(false); CompilationInfo nonObjcArcCompilationInfo = compile( objcProvider, extensionBuilder.build(), extraCompileArgs, ccToolchain, fdoContext, priorityHeaders, nonArcSources, privateHdrs, publicHdrs, dependentGeneratedHdrs, pchHdr, deps, semantics, purpose, // Only generate the module map once (see above) and re-use it here. /* generateModuleMap= */ false); FeatureConfiguration featureConfiguration = getFeatureConfiguration(ruleContext, ccToolchain, buildConfiguration, objcProvider); CcLinkingHelper resultLink = new CcLinkingHelper( ruleContext, ruleContext.getLabel(), ruleContext, ruleContext, semantics, featureConfiguration, ccToolchain, fdoContext, buildConfiguration, ruleContext.getFragment(CppConfiguration.class), ruleContext.getSymbolGenerator(), TargetUtils.getExecutionInfo( ruleContext.getRule(), ruleContext.isAllowTagsPropagation())) .setGrepIncludes(CppHelper.getGrepIncludes(ruleContext)) .setIsStampingEnabled(AnalysisUtils.isStampingEnabled(ruleContext)) .setTestOrTestOnlyTarget(ruleContext.isTestTarget() || ruleContext.isTestOnlyTarget()) .addCcLinkingContexts( CppHelper.getLinkingContextsFromDeps( ImmutableList.copyOf(ruleContext.getPrerequisites("deps", Mode.TARGET)))) .setLinkedArtifactNameSuffix(intermediateArtifacts.archiveFileNameSuffix()) .setNeverLink(true) 
.addVariableExtension(extensionBuilder.build()); if (linkType != null) { resultLink.setStaticLinkType(linkType); } if (linkActionInput != null) { resultLink.addLinkActionInput(linkActionInput); } CcCompilationContext.Builder ccCompilationContextBuilder = CcCompilationContext.builder( ruleContext, ruleContext.getConfiguration(), ruleContext.getLabel()); ccCompilationContextBuilder.mergeDependentCcCompilationContexts( Arrays.asList( objcArcCompilationInfo.getCcCompilationContext(), nonObjcArcCompilationInfo.getCcCompilationContext())); ccCompilationContextBuilder.setPurpose( String.format("%s_merged_arc_non_arc_objc", semantics.getPurpose())); ccCompilationContextBuilder.addQuoteIncludeDirs( ObjcCommon.userHeaderSearchPaths(objcProvider, ruleContext.getConfiguration())); CcCompilationOutputs precompiledFilesObjects = CcCompilationOutputs.builder() .addObjectFiles(precompiledFiles.getObjectFiles(/* usePic= */ false)) .addPicObjectFiles(precompiledFiles.getObjectFiles(/* usePic= */ true)) .build(); CcCompilationOutputs.Builder compilationOutputsBuilder = CcCompilationOutputs.builder() .merge(objcArcCompilationInfo.getCcCompilationOutputs()) .merge(nonObjcArcCompilationInfo.getCcCompilationOutputs()) .merge(precompiledFilesObjects); compilationOutputsBuilder.merge(objcArcCompilationInfo.getCcCompilationOutputs()); compilationOutputsBuilder.merge(nonObjcArcCompilationInfo.getCcCompilationOutputs()); CcCompilationOutputs compilationOutputs = compilationOutputsBuilder.build(); if (!compilationOutputs.isEmpty()) { resultLink.link(compilationOutputs); } CppConfiguration cppConfiguration = ruleContext.getFragment(CppConfiguration.class); Map<String, NestedSet<Artifact>> arcOutputGroups = CcCompilationHelper.buildOutputGroupsForEmittingCompileProviders( objcArcCompilationInfo.getCcCompilationOutputs(), objcArcCompilationInfo.getCcCompilationContext(), cppConfiguration, ccToolchain, featureConfiguration, ruleContext); Map<String, NestedSet<Artifact>> nonArcOutputGroups = 
CcCompilationHelper.buildOutputGroupsForEmittingCompileProviders( nonObjcArcCompilationInfo.getCcCompilationOutputs(), nonObjcArcCompilationInfo.getCcCompilationContext(), cppConfiguration, ccToolchain, featureConfiguration, ruleContext); Map<String, NestedSet<Artifact>> mergedOutputGroups = CcCommon.mergeOutputGroups(ImmutableList.of(arcOutputGroups, nonArcOutputGroups)); return new Pair<>(compilationOutputsBuilder.build(), ImmutableMap.copyOf(mergedOutputGroups)); } private ObjcCppSemantics createObjcCppSemantics( ObjcProvider objcProvider, Collection<Artifact> privateHdrs, Artifact pchHdr) { Iterable<Artifact> extraInputs = getExtraIncludeProcessingInputs(privateHdrs, pchHdr); return new ObjcCppSemantics( objcProvider, includeProcessingType, createIncludeProcessing(Iterables.concat(extraInputs, objcProvider.get(HEADER))), extraInputs, ruleContext.getFragment(ObjcConfiguration.class), intermediateArtifacts, buildConfiguration, attributes.enableModules()); } private FeatureConfiguration getFeatureConfiguration( RuleContext ruleContext, CcToolchainProvider ccToolchain, BuildConfiguration configuration, ObjcProvider objcProvider) { boolean isHost = ruleContext.getConfiguration().isHostConfiguration(); ImmutableSet.Builder<String> activatedCrosstoolSelectables = ImmutableSet.<String>builder() .addAll(ccToolchain.getFeatures().getDefaultFeaturesAndActionConfigs()) .addAll(ACTIVATED_ACTIONS) .addAll( ruleContext .getFragment(AppleConfiguration.class) .getBitcodeMode() .getFeatureNames()) // We create a module map by default to allow for Swift interop. .add(CppRuleClasses.MODULE_MAPS) .add(CppRuleClasses.COMPILE_ALL_MODULES) .add(CppRuleClasses.EXCLUDE_PRIVATE_HEADERS_IN_MODULE_MAPS) .add(CppRuleClasses.ONLY_DOTH_HEADERS_IN_MODULE_MAPS) .add(CppRuleClasses.DEPENDENCY_FILE) .add(CppRuleClasses.INCLUDE_PATHS) .add(isHost ? 
"host" : "nonhost") .add(configuration.getCompilationMode().toString()); if (configuration.getFragment(ObjcConfiguration.class).moduleMapsEnabled() && !getCustomModuleMap(ruleContext).isPresent()) { activatedCrosstoolSelectables.add(OBJC_MODULE_FEATURE_NAME); } if (!attributes.enableModules()) { activatedCrosstoolSelectables.add(NO_ENABLE_MODULES_FEATURE_NAME); } if (configuration.getFragment(ObjcConfiguration.class).shouldStripBinary()) { activatedCrosstoolSelectables.add(DEAD_STRIP_FEATURE_NAME); } if (getPchFile().isPresent()) { activatedCrosstoolSelectables.add("pch"); } if (!isTestRule) { activatedCrosstoolSelectables.add(IS_NOT_TEST_TARGET_FEATURE_NAME); } if (configuration.getFragment(ObjcConfiguration.class).generateDsym()) { activatedCrosstoolSelectables.add(GENERATE_DSYM_FILE_FEATURE_NAME); } else { activatedCrosstoolSelectables.add(NO_GENERATE_DEBUG_SYMBOLS_FEATURE_NAME); } if (configuration.getFragment(ObjcConfiguration.class).generateLinkmap()) { activatedCrosstoolSelectables.add(GENERATE_LINKMAP_FEATURE_NAME); } AppleBitcodeMode bitcodeMode = configuration.getFragment(AppleConfiguration.class).getBitcodeMode(); if (bitcodeMode != AppleBitcodeMode.NONE) { activatedCrosstoolSelectables.addAll(bitcodeMode.getFeatureNames()); } if (objcProvider.is(Flag.USES_OBJC)) { activatedCrosstoolSelectables.add(CONTAINS_OBJC); } // Add a feature identifying the Xcode version so CROSSTOOL authors can enable flags for // particular versions of Xcode. To ensure consistency across platforms, use exactly two // components in the version number. 
activatedCrosstoolSelectables.add( XCODE_VERSION_FEATURE_NAME_PREFIX + XcodeConfig.getXcodeConfigProvider(ruleContext) .getXcodeVersion() .toStringWithComponents(2)); activatedCrosstoolSelectables.addAll(ruleContext.getFeatures()); CppConfiguration cppConfiguration = ruleContext.getFragment(CppConfiguration.class); activatedCrosstoolSelectables.addAll(CcCommon.getCoverageFeatures(cppConfiguration)); try { return ccToolchain .getFeatures() .getFeatureConfiguration(activatedCrosstoolSelectables.build()); } catch (CollidingProvidesException e) { ruleContext.ruleError(e.getMessage()); return FeatureConfiguration.EMPTY; } } /** Iterable wrapper providing strong type safety for arguments to binary linking. */ static final class ExtraLinkArgs extends IterableWrapper<String> { ExtraLinkArgs(String... args) { super(args); } ExtraLinkArgs(Iterable<String> args) { super(args); } } /** Iterable wrapper providing strong type safety for extra compile flags. */ static final class ExtraCompileArgs extends IterableWrapper<String> { static final ExtraCompileArgs NONE = new ExtraCompileArgs(); ExtraCompileArgs(String... args) { super(args); } } @VisibleForTesting static final String FILE_IN_SRCS_AND_HDRS_WARNING_FORMAT = "File '%s' is in both srcs and hdrs."; @VisibleForTesting static final String FILE_IN_SRCS_AND_NON_ARC_SRCS_ERROR_FORMAT = "File '%s' is present in both srcs and non_arc_srcs which is forbidden."; @VisibleForTesting static final String BOTH_MODULE_NAME_AND_MODULE_MAP_SPECIFIED = "Specifying both module_name and module_map is invalid, please remove one of them."; static final ImmutableList<String> DEFAULT_COMPILER_FLAGS = ImmutableList.of("-DOS_IOS"); /** * Set of {@link com.google.devtools.build.lib.util.FileType} of source artifacts that are * compatible with header thinning. 
 */
  private static final FileTypeSet SOURCES_FOR_HEADER_THINNING =
      FileTypeSet.of(
          CppFileTypes.OBJC_SOURCE,
          CppFileTypes.OBJCPP_SOURCE,
          CppFileTypes.CPP_SOURCE,
          CppFileTypes.C_SOURCE);

  /** Returns information about the given rule's compilation artifacts. */
  // TODO(bazel-team): Remove this information from ObjcCommon and move it internal to this class.
  static CompilationArtifacts compilationArtifacts(RuleContext ruleContext) {
    return compilationArtifacts(ruleContext, ObjcRuleClasses.intermediateArtifacts(ruleContext));
  }

  /**
   * Returns information about the given rule's compilation artifacts. Dependencies specified in the
   * current rule's attributes are obtained via {@code ruleContext}. Output locations are determined
   * using the given {@code intermediateArtifacts} object. The fact that these are distinct objects
   * allows the caller to generate compilation actions pertaining to a configuration separate from
   * the current rule's configuration.
   */
  static CompilationArtifacts compilationArtifacts(
      RuleContext ruleContext, IntermediateArtifacts intermediateArtifacts) {
    // "srcs" is partitioned below: compilable sources, headers, and precompiled objects.
    PrerequisiteArtifacts srcs =
        ruleContext.getPrerequisiteArtifacts("srcs", Mode.TARGET).errorsForNonMatching(SRCS_TYPE);
    return new CompilationArtifacts.Builder()
        .addSrcs(srcs.filter(COMPILABLE_SRCS_TYPE).list())
        .addNonArcSrcs(
            ruleContext
                .getPrerequisiteArtifacts("non_arc_srcs", Mode.TARGET)
                .errorsForNonMatching(NON_ARC_SRCS_TYPE)
                .list())
        .addPrivateHdrs(srcs.filter(HEADERS).list())
        .addPrecompiledSrcs(srcs.filter(PRECOMPILED_SRCS_TYPE).list())
        .setIntermediateArtifacts(intermediateArtifacts)
        .build();
  }

  /** Returns a list of framework header search path fragments. */
  // NOTE(review): ruleContext and buildConfiguration are currently unused here; kept for
  // signature compatibility with callers and the sibling overloads.
  static ImmutableList<PathFragment> frameworkHeaderSearchPathFragments(
      ObjcProvider provider, RuleContext ruleContext, BuildConfiguration buildConfiguration)
      throws InterruptedException {
    ImmutableList.Builder<PathFragment> searchPaths = new ImmutableList.Builder<>();
    return searchPaths
        .addAll(uniqueParentDirectories(provider.get(FRAMEWORK_SEARCH_PATHS)))
        .build();
  }

  /** Returns a list of framework header search paths (as strings). */
  static ImmutableList<String> frameworkHeaderSearchPaths(
      ObjcProvider provider, RuleContext ruleContext, BuildConfiguration buildConfiguration)
      throws InterruptedException {
    ImmutableList.Builder<String> searchPaths = new ImmutableList.Builder<>();
    return searchPaths
        .addAll(
            Iterables.transform(
                frameworkHeaderSearchPathFragments(provider, ruleContext, buildConfiguration),
                PathFragment::getSafePathString))
        .build();
  }

  /** Returns a list of framework library search paths. */
  static ImmutableList<String> frameworkLibrarySearchPaths(
      ObjcProvider provider, RuleContext ruleContext, BuildConfiguration buildConfiguration)
      throws InterruptedException {
    ImmutableList.Builder<String> searchPaths = new ImmutableList.Builder<>();
    return searchPaths
        // Add library search paths corresponding to custom (non-SDK) frameworks. For each framework
        // foo/bar.framework, include "foo" as a search path.
        .addAll(provider.staticFrameworkPaths())
        .addAll(provider.dynamicFrameworkPaths())
        .build();
  }

  // Immutable per-instance state, set once in the constructor.
  private final RuleContext ruleContext;
  private final BuildConfiguration buildConfiguration;
  private final ObjcConfiguration objcConfiguration;
  private final AppleConfiguration appleConfiguration;
  private final CompilationAttributes attributes;
  private final IntermediateArtifacts intermediateArtifacts;
  // Mutable collectors supplied by the caller; populated during action registration.
  private final Map<String, NestedSet<Artifact>> outputGroupCollector;
  private final ImmutableList.Builder<Artifact> objectFilesCollector;
  private final CcToolchainProvider toolchain;
  private final boolean isTestRule;
  private final boolean usePch;
  private final IncludeProcessingType includeProcessingType;

  /**
   * Creates a new compilation support for the given rule and build configuration.
   *
   * <p>All actions will be created under the given build configuration, which may be different than
   * the current rule context configuration.
   *
   * <p>The compilation and linking flags will be retrieved from the given compilation attributes.
   * The names of the generated artifacts will be retrieved from the given intermediate artifacts.
   *
   * <p>By instantiating multiple compilation supports for the same rule but with intermediate
   * artifacts with different output prefixes, multiple archives can be compiled for the same rule
   * context.
*/ private CompilationSupport( RuleContext ruleContext, BuildConfiguration buildConfiguration, IntermediateArtifacts intermediateArtifacts, CompilationAttributes compilationAttributes, Map<String, NestedSet<Artifact>> outputGroupCollector, ImmutableList.Builder<Artifact> objectFilesCollector, CcToolchainProvider toolchain, boolean isTestRule, boolean usePch) throws InterruptedException { this.ruleContext = ruleContext; this.buildConfiguration = buildConfiguration; this.objcConfiguration = buildConfiguration.getFragment(ObjcConfiguration.class); this.appleConfiguration = buildConfiguration.getFragment(AppleConfiguration.class); this.attributes = compilationAttributes; this.intermediateArtifacts = intermediateArtifacts; this.isTestRule = isTestRule; this.outputGroupCollector = outputGroupCollector; this.objectFilesCollector = objectFilesCollector; this.usePch = usePch; if (toolchain == null && ruleContext .attributes() .has(CcToolchain.CC_TOOLCHAIN_DEFAULT_ATTRIBUTE_NAME, BuildType.LABEL)) { toolchain = CppHelper.getToolchainUsingDefaultCcToolchainAttribute(ruleContext); } this.toolchain = toolchain; if (objcConfiguration.shouldScanIncludes()) { includeProcessingType = IncludeProcessingType.INCLUDE_SCANNING; } else if (isHeaderThinningEnabled()) { includeProcessingType = IncludeProcessingType.HEADER_THINNING; } else { includeProcessingType = IncludeProcessingType.NO_PROCESSING; } } /** Builder for {@link CompilationSupport} */ public static class Builder { private RuleContext ruleContext; private BuildConfiguration buildConfiguration; private IntermediateArtifacts intermediateArtifacts; private CompilationAttributes compilationAttributes; private Map<String, NestedSet<Artifact>> outputGroupCollector; private ImmutableList.Builder<Artifact> objectFilesCollector; private CcToolchainProvider toolchain; private boolean isTestRule = false; private boolean usePch = true; /** Sets the {@link RuleContext} for the calling target. 
     */
    public Builder setRuleContext(RuleContext ruleContext) {
      this.ruleContext = ruleContext;
      return this;
    }

    /** Sets the {@link BuildConfiguration} for the calling target. */
    public Builder setConfig(BuildConfiguration buildConfiguration) {
      this.buildConfiguration = buildConfiguration;
      return this;
    }

    /** Sets {@link IntermediateArtifacts} for deriving artifact paths. */
    public Builder setIntermediateArtifacts(IntermediateArtifacts intermediateArtifacts) {
      this.intermediateArtifacts = intermediateArtifacts;
      return this;
    }

    /** Sets {@link CompilationAttributes} for the calling target. */
    public Builder setCompilationAttributes(CompilationAttributes compilationAttributes) {
      this.compilationAttributes = compilationAttributes;
      return this;
    }

    /**
     * Sets that this {@link CompilationSupport} will not use the pch from the rule context in
     * determining compilation actions.
     */
    public Builder doNotUsePch() {
      this.usePch = false;
      return this;
    }

    /** Indicates that this CompilationSupport is for use in a test rule. */
    public Builder setIsTestRule() {
      this.isTestRule = true;
      return this;
    }

    /**
     * Causes the provided map to be updated with output groups produced by compile action
     * registration.
     *
     * <p>This map is intended to be mutated by {@link
     * CompilationSupport#registerCompileAndArchiveActions}. The added output groups should be
     * exported by the calling rule class implementation.
     */
    public Builder setOutputGroupCollector(Map<String, NestedSet<Artifact>> outputGroupCollector) {
      this.outputGroupCollector = outputGroupCollector;
      return this;
    }

    /**
     * Set a collector for the object files produced by compile action registration.
     *
     * <p>The object files are intended to be added by {@link
     * CompilationSupport#registerCompileAndArchiveActions}.
     */
    public Builder setObjectFilesCollector(ImmutableList.Builder<Artifact> objectFilesCollector) {
      this.objectFilesCollector = objectFilesCollector;
      return this;
    }

    /**
     * Sets {@link CcToolchainProvider} for the calling target.
     *
     * <p>This is needed if it can't correctly be inferred directly from the rule context. Setting
     * to null causes the default to be used as if this was never called.
     */
    public Builder setToolchainProvider(CcToolchainProvider toolchain) {
      this.toolchain = toolchain;
      return this;
    }

    /** Returns a {@link CompilationSupport} instance. */
    public CompilationSupport build() throws InterruptedException {
      // The rule context is the only strictly required field; every other unset field below is
      // given a default derived from it.
      Preconditions.checkNotNull(ruleContext, "CompilationSupport is missing RuleContext");

      if (buildConfiguration == null) {
        buildConfiguration = ruleContext.getConfiguration();
      }

      if (intermediateArtifacts == null) {
        intermediateArtifacts =
            ObjcRuleClasses.intermediateArtifacts(ruleContext, buildConfiguration);
      }

      if (compilationAttributes == null) {
        compilationAttributes = CompilationAttributes.Builder.fromRuleContext(ruleContext).build();
      }

      if (outputGroupCollector == null) {
        outputGroupCollector = new TreeMap<>();
      }

      if (objectFilesCollector == null) {
        objectFilesCollector = ImmutableList.builder();
      }

      return new CompilationSupport(
          ruleContext,
          buildConfiguration,
          intermediateArtifacts,
          compilationAttributes,
          outputGroupCollector,
          objectFilesCollector,
          toolchain,
          isTestRule,
          usePch);
    }
  }

  /**
   * Returns a provider that collects this target's instrumented sources as well as those of its
   * dependencies.
   *
   * @param objectFiles the object files generated by this target
   * @return an instrumented files provider
   */
  public InstrumentedFilesInfo getInstrumentedFilesProvider(ImmutableList<Artifact> objectFiles) {
    return InstrumentedFilesCollector.collect(
        ruleContext,
        INSTRUMENTATION_SPEC,
        new ObjcCoverageMetadataCollector(),
        objectFiles,
        NestedSetBuilder.<Artifact>emptySet(Order.STABLE_ORDER),
        // The COVERAGE_GCOV_PATH environment variable is added in TestSupport#getExtraProviders()
        NestedSetBuilder.<Pair<String, String>>emptySet(Order.COMPILE_ORDER),
        !isTestRule,
        /* reportedToActualSources= */ NestedSetBuilder.create(Order.STABLE_ORDER));
  }

  /**
   * Validates compilation-related attributes on this rule.
   *
   * @return this compilation support
   * @throws RuleErrorException if there are attribute errors
   */
  CompilationSupport validateAttributes() throws RuleErrorException {
    // Absolute include paths are always an attribute error.
    for (PathFragment absoluteInclude :
        Iterables.filter(attributes.includes(), PathFragment::isAbsolute)) {
      ruleContext.attributeError(
          "includes", String.format(ABSOLUTE_INCLUDES_PATH_FORMAT, absoluteInclude));
    }

    if (ruleContext.attributes().has("srcs", BuildType.LABEL_LIST)) {
      ImmutableSet<Artifact> hdrsSet = ImmutableSet.copyOf(attributes.hdrs());
      ImmutableSet<Artifact> srcsSet =
          ImmutableSet.copyOf(ruleContext.getPrerequisiteArtifacts("srcs", Mode.TARGET).list());

      // Check for overlap between srcs and hdrs; this only warns.
      for (Artifact header : Sets.intersection(hdrsSet, srcsSet)) {
        String path = header.getRootRelativePath().toString();
        ruleContext.attributeWarning(
            "srcs", String.format(FILE_IN_SRCS_AND_HDRS_WARNING_FORMAT, path));
      }

      // Check for overlap between srcs and non_arc_srcs; this is a hard error.
      ImmutableSet<Artifact> nonArcSrcsSet =
          ImmutableSet.copyOf(
              ruleContext.getPrerequisiteArtifacts("non_arc_srcs", Mode.TARGET).list());
      for (Artifact conflict : Sets.intersection(nonArcSrcsSet, srcsSet)) {
        String path = conflict.getRootRelativePath().toString();
        ruleContext.attributeError(
            "srcs", String.format(FILE_IN_SRCS_AND_NON_ARC_SRCS_ERROR_FORMAT, path));
      }
    }

    // An explicit module name and an explicit module map are mutually exclusive.
    if (ruleContext.attributes().isAttributeValueExplicitlySpecified("module_name")
        && ruleContext.attributes().isAttributeValueExplicitlySpecified("module_map")) {
      ruleContext.attributeError("module_name", BOTH_MODULE_NAME_AND_MODULE_MAP_SPECIFIED);
    }

    ruleContext.assertNoErrors();

    return this;
  }

  /**
   * Registers all actions necessary to compile this rule's sources and archive them.
   *
   * @param compilationArtifacts collection of artifacts required for the compilation
   * @param objcProvider provides all compiling and linking information to register these actions
   * @param toolchain the toolchain to be used in determining command lines
   * @return this compilation support
   * @throws RuleErrorException for invalid crosstool files
   */
  CompilationSupport registerCompileAndArchiveActions(
      CompilationArtifacts compilationArtifacts,
      ObjcProvider objcProvider,
      CcToolchainProvider toolchain)
      throws RuleErrorException, InterruptedException {
    // Delegates to the full overload with no extra compile args or priority headers, using the
    // given toolchain's FDO context.
    return registerCompileAndArchiveActions(
        compilationArtifacts,
        objcProvider,
        ExtraCompileArgs.NONE,
        ImmutableList.<PathFragment>of(),
        toolchain,
        toolchain.getFdoContext());
  }

  /**
   * Registers all actions necessary to compile this rule's sources and archive them.
   *
   * @param common common information about this rule and its dependencies
   * @return this compilation support
   * @throws RuleErrorException for invalid crosstool files
   */
  CompilationSupport registerCompileAndArchiveActions(ObjcCommon common)
      throws RuleErrorException, InterruptedException {
    // Convenience overload: no extra compile args, no priority headers.
    return registerCompileAndArchiveActions(
        common, ExtraCompileArgs.NONE, ImmutableList.<PathFragment>of());
  }

  /**
   * Registers all actions necessary to compile this rule's sources and archive them.
   *
   * @param common common information about this rule and its dependencies
   * @param priorityHeaders priority headers to be included before the dependency headers
   * @return this compilation support
   * @throws RuleErrorException for invalid crosstool files
   */
  CompilationSupport registerCompileAndArchiveActions(
      ObjcCommon common, Iterable<PathFragment> priorityHeaders)
      throws RuleErrorException, InterruptedException {
    // Convenience overload: no extra compile args.
    return registerCompileAndArchiveActions(common, ExtraCompileArgs.NONE, priorityHeaders);
  }

  /**
   * Registers all actions necessary to compile this rule's sources and archive them.
   *
   * @param compilationArtifacts collection of artifacts required for the compilation
   * @param objcProvider provides all compiling and linking information to register these actions
   * @param extraCompileArgs args to be added to compile actions
   * @param priorityHeaders priority headers to be included before the dependency headers
   * @param ccToolchain the cpp toolchain provider, may be null
   * @param fdoContext the cpp FDO support provider, may be null
   * @return this compilation support
   * @throws RuleErrorException for invalid crosstool files
   */
  CompilationSupport registerCompileAndArchiveActions(
      CompilationArtifacts compilationArtifacts,
      ObjcProvider objcProvider,
      ExtraCompileArgs extraCompileArgs,
      Iterable<PathFragment> priorityHeaders,
      @Nullable CcToolchainProvider ccToolchain,
      @Nullable FdoContext fdoContext)
      throws RuleErrorException, InterruptedException {
    // Despite the @Nullable annotations, both are required by this implementation.
    Preconditions.checkNotNull(ccToolchain);
    Preconditions.checkNotNull(fdoContext);
    ObjcVariablesExtension.Builder extension =
        new ObjcVariablesExtension.Builder()
            .setRuleContext(ruleContext)
            .setObjcProvider(objcProvider)
            .setCompilationArtifacts(compilationArtifacts)
            .setIntermediateArtifacts(intermediateArtifacts)
            .setConfiguration(buildConfiguration)
            .setFrameworkSearchPath(
                frameworkHeaderSearchPaths(objcProvider, ruleContext, buildConfiguration));

    Pair<CcCompilationOutputs, ImmutableMap<String, NestedSet<Artifact>>> compilationInfo;

    if (compilationArtifacts.getArchive().isPresent()) {
      // Archive case: compile and link into an OBJC_ARCHIVE, and also emit the .objlist param
      // file enumerating the object files to archive.
      Artifact objList = intermediateArtifacts.archiveObjList();

      extension.addVariableCategory(VariableCategory.ARCHIVE_VARIABLES);

      compilationInfo =
          ccCompileAndLink(
              objcProvider,
              compilationArtifacts,
              extension,
              extraCompileArgs,
              ccToolchain,
              fdoContext,
              priorityHeaders,
              LinkTargetType.OBJC_ARCHIVE,
              objList);

      // TODO(b/30783125): Signal the need for this action in the CROSSTOOL.
      registerObjFilelistAction(
          compilationInfo.getFirst().getObjectFiles(/* usePic= */ false), objList);
    } else {
      // No archive requested: compile only (null link type / link action input).
      compilationInfo =
          ccCompileAndLink(
              objcProvider,
              compilationArtifacts,
              extension,
              extraCompileArgs,
              ccToolchain,
              fdoContext,
              priorityHeaders,
              /* linkType */ null,
              /* linkActionInput */ null);
    }

    // Feed the results back to the collectors supplied at construction time.
    objectFilesCollector.addAll(compilationInfo.getFirst().getObjectFiles(/* usePic= */ false));
    outputGroupCollector.putAll(compilationInfo.getSecond());

    registerHeaderScanningActions(compilationInfo.getFirst(), objcProvider, compilationArtifacts);

    return this;
  }

  /**
   * Registers all actions necessary to compile this rule's sources and archive them.
   *
   * @param common common information about this rule and its dependencies
   * @param extraCompileArgs args to be added to compile actions
   * @param priorityHeaders priority headers to be included before the dependency headers
   * @return this compilation support
   * @throws RuleErrorException for invalid crosstool files
   */
  CompilationSupport registerCompileAndArchiveActions(
      ObjcCommon common, ExtraCompileArgs extraCompileArgs, Iterable<PathFragment> priorityHeaders)
      throws RuleErrorException, InterruptedException {
    // Registration is skipped entirely when the rule has no compilation artifacts.
    if (common.getCompilationArtifacts().isPresent()) {
      registerCompileAndArchiveActions(
          common.getCompilationArtifacts().get(),
          common.getObjcProvider(),
          extraCompileArgs,
          priorityHeaders,
          toolchain,
          toolchain.getFdoContext());
    }
    return this;
  }

  /** Returns the stripping type implied by the given extra linker arguments. */
  private StrippingType getStrippingType(ExtraLinkArgs extraLinkArgs) {
    if (Iterables.contains(extraLinkArgs, "-dynamiclib")) {
      return StrippingType.DYNAMIC_LIB;
    }
    if (Iterables.contains(extraLinkArgs, "-kext")) {
      return StrippingType.KERNEL_EXTENSION;
    }
    return StrippingType.DEFAULT;
  }

  /**
   * Registers any actions necessary to link this rule and its dependencies. Automatically infers
   * the toolchain from the configuration of this CompilationSupport - if a different toolchain is
   * required, use the custom toolchain override.
* * <p>Dsym bundle is generated if {@link ObjcConfiguration#generateDsym()} is set. * * <p>When Bazel flags {@code --compilation_mode=opt} and {@code --objc_enable_binary_stripping} * are specified, additional optimizations will be performed on the linked binary: all-symbol * stripping (using {@code /usr/bin/strip}) and dead-code stripping (using linker flags: {@code * -dead_strip} and {@code -no_dead_strip_inits_and_terms}). * * @param objcProvider common information about this rule's attributes and its dependencies * @param j2ObjcMappingFileProvider contains mapping files for j2objc transpilation * @param j2ObjcEntryClassProvider contains j2objc entry class information for dead code removal * @param extraLinkArgs any additional arguments to pass to the linker * @param extraLinkInputs any additional input artifacts to pass to the link action * @return this compilation support */ CompilationSupport registerLinkActions( ObjcProvider objcProvider, J2ObjcMappingFileProvider j2ObjcMappingFileProvider, J2ObjcEntryClassProvider j2ObjcEntryClassProvider, ExtraLinkArgs extraLinkArgs, Iterable<Artifact> extraLinkInputs, CcToolchainProvider toolchain) throws InterruptedException, RuleErrorException { Iterable<Artifact> prunedJ2ObjcArchives = computeAndStripPrunedJ2ObjcArchives( j2ObjcEntryClassProvider, j2ObjcMappingFileProvider, objcProvider); ImmutableList<Artifact> bazelBuiltLibraries = Iterables.isEmpty(prunedJ2ObjcArchives) ? objcProvider.getObjcLibraries() : substituteJ2ObjcPrunedLibraries(objcProvider); Artifact inputFileList = intermediateArtifacts.linkerObjList(); ImmutableSet<Artifact> forceLinkArtifacts = getForceLoadArtifacts(objcProvider); Iterable<Artifact> objFiles = Iterables.concat( bazelBuiltLibraries, objcProvider.get(IMPORTED_LIBRARY), objcProvider.getCcLibraries()); // Clang loads archives specified in filelists and also specified as -force_load twice, // resulting in duplicate symbol errors unless they are deduped. 
objFiles = Iterables.filter(objFiles, Predicates.not(Predicates.in(forceLinkArtifacts))); registerObjFilelistAction(objFiles, inputFileList); LinkTargetType linkType = objcProvider.is(Flag.USES_CPP) ? LinkTargetType.OBJCPP_EXECUTABLE : LinkTargetType.OBJC_EXECUTABLE; ObjcVariablesExtension.Builder extensionBuilder = new ObjcVariablesExtension.Builder() .setRuleContext(ruleContext) .setObjcProvider(objcProvider) .setConfiguration(buildConfiguration) .setIntermediateArtifacts(intermediateArtifacts) .setFrameworkNames(frameworkNames(objcProvider)) .setFrameworkSearchPath( frameworkLibrarySearchPaths(objcProvider, ruleContext, buildConfiguration)) .setLibraryNames(libraryNames(objcProvider)) .setForceLoadArtifacts(getForceLoadArtifacts(objcProvider)) .setAttributeLinkopts(attributes.linkopts()) .addVariableCategory(VariableCategory.EXECUTABLE_LINKING_VARIABLES); Artifact binaryToLink = getBinaryToLink(); FdoContext fdoContext = toolchain.getFdoContext(); CppLinkActionBuilder executableLinkAction = new CppLinkActionBuilder( ruleContext, ruleContext, ruleContext.getLabel(), binaryToLink, ruleContext.getConfiguration(), toolchain, fdoContext, getFeatureConfiguration(ruleContext, toolchain, buildConfiguration, objcProvider), createObjcCppSemantics( objcProvider, /* privateHdrs= */ ImmutableList.of(), /* pchHdr= */ null)) .setGrepIncludes(CppHelper.getGrepIncludes(ruleContext)) .setIsStampingEnabled(AnalysisUtils.isStampingEnabled(ruleContext)) .setTestOrTestOnlyTarget(ruleContext.isTestOnlyTarget() || ruleContext.isTestTarget()) .setMnemonic("ObjcLink") .addActionInputs(bazelBuiltLibraries) .addActionInputs(objcProvider.getCcLibraries()) .addTransitiveActionInputs(objcProvider.get(IMPORTED_LIBRARY)) .addTransitiveActionInputs(objcProvider.get(STATIC_FRAMEWORK_FILE)) .addTransitiveActionInputs(objcProvider.get(DYNAMIC_FRAMEWORK_FILE)) .addTransitiveActionInputs(objcProvider.get(LINK_INPUTS)) .setLinkerFiles(toolchain.getLinkerFiles()) .addActionInputs(prunedJ2ObjcArchives) 
.addActionInputs(extraLinkInputs) .addActionInput(inputFileList) .setLinkType(linkType) .setLinkingMode(LinkingMode.STATIC) .addLinkopts(ImmutableList.copyOf(extraLinkArgs)); if (objcConfiguration.generateDsym()) { Artifact dsymSymbol = objcConfiguration.shouldStripBinary() ? intermediateArtifacts.dsymSymbolForUnstrippedBinary() : intermediateArtifacts.dsymSymbolForStrippedBinary(); extensionBuilder .setDsymSymbol(dsymSymbol) .addVariableCategory(VariableCategory.DSYM_VARIABLES); executableLinkAction.addActionOutput(dsymSymbol); } if (objcConfiguration.generateLinkmap()) { Artifact linkmap = intermediateArtifacts.linkmap(); extensionBuilder.setLinkmap(linkmap).addVariableCategory(VariableCategory.LINKMAP_VARIABLES); executableLinkAction.addActionOutput(linkmap); } if (appleConfiguration.getBitcodeMode() == AppleBitcodeMode.EMBEDDED) { Artifact bitcodeSymbolMap = intermediateArtifacts.bitcodeSymbolMap(); extensionBuilder .setBitcodeSymbolMap(bitcodeSymbolMap) .addVariableCategory(VariableCategory.BITCODE_VARIABLES); executableLinkAction.addActionOutput(bitcodeSymbolMap); } executableLinkAction.addVariablesExtension(extensionBuilder.build()); ruleContext.registerAction(executableLinkAction.build()); if (objcConfiguration.shouldStripBinary()) { registerBinaryStripAction(binaryToLink, getStrippingType(extraLinkArgs)); } return this; } /** * Returns the copts for the compile action in the current rule context (using a combination of * the rule's "copts" attribute as well as the current configuration copts). */ private Iterable<String> getCompileRuleCopts() { List<String> copts = Stream.concat(objcConfiguration.getCopts().stream(), attributes.copts().stream()) .collect(toCollection(ArrayList::new)); for (String copt : copts) { if (copt.contains("-fmodules-cache-path")) { // Bazel decides on the cache path location. 
ruleContext.ruleWarning(MODULES_CACHE_PATH_WARNING); } } if (attributes.enableModules() && !getCustomModuleMap(ruleContext).isPresent()) { copts.add("-fmodules"); } if (copts.contains("-fmodules")) { // If modules are enabled, clang caches module information. If unspecified, this is a // system-wide cache directory, which is a problem for remote executors which may run // multiple actions with different source trees that can't share this cache. // We thus set its path to the root of the genfiles directory. // Unfortunately, this cache contains non-hermetic information, thus we avoid declaring it as // an implicit output (as outputs must be hermetic). String cachePath = buildConfiguration.getGenfilesFragment() + "/" + OBJC_MODULE_CACHE_DIR_NAME; copts.add("-fmodules-cache-path=" + cachePath); } return copts; } /** * Registers an action that writes given set of object files to the given objList. This objList is * suitable to signal symbols to archive in a libtool archiving invocation. */ private CompilationSupport registerObjFilelistAction( Iterable<Artifact> objFiles, Artifact objList) { ImmutableSet<Artifact> dedupedObjFiles = ImmutableSet.copyOf(objFiles); CustomCommandLine.Builder objFilesToLinkParam = new CustomCommandLine.Builder(); ImmutableList.Builder<Artifact> treeObjFiles = new ImmutableList.Builder<>(); for (Artifact objFile : dedupedObjFiles) { // If the obj file is a tree artifact, we need to expand it into the contained individual // files properly. 
if (objFile.isTreeArtifact()) { treeObjFiles.add(objFile); objFilesToLinkParam.addExpandedTreeArtifactExecPaths(objFile); } else { objFilesToLinkParam.addPath(objFile.getExecPath()); } } ruleContext.registerAction( new ParameterFileWriteAction( ruleContext.getActionOwner(), treeObjFiles.build(), objList, objFilesToLinkParam.build(), ParameterFile.ParameterFileType.UNQUOTED, ISO_8859_1)); return this; } /** * Registers an action to create an archive artifact by fully (statically) linking all transitive * dependencies of this rule. * * @param objcProvider provides all compiling and linking information to create this artifact * @param outputArchive the output artifact for this action */ public CompilationSupport registerFullyLinkAction( ObjcProvider objcProvider, Artifact outputArchive) throws InterruptedException, RuleErrorException { return registerFullyLinkAction( objcProvider, outputArchive, toolchain, toolchain.getFdoContext()); } /** * Registers an action to create an archive artifact by fully (statically) linking all transitive * dependencies of this rule. 
   *
   * @param objcProvider provides all compiling and linking information to create this artifact
   * @param outputArchive the output artifact for this action
   * @param ccToolchain the cpp toolchain provider, may be null
   * @param fdoContext the cpp FDO support provider, may be null
   * @return this {@link CompilationSupport} instance
   */
  CompilationSupport registerFullyLinkAction(
      ObjcProvider objcProvider,
      Artifact outputArchive,
      @Nullable CcToolchainProvider ccToolchain,
      @Nullable FdoContext fdoContext)
      throws InterruptedException, RuleErrorException {
    // Despite the @Nullable annotations, both are required by this implementation.
    Preconditions.checkNotNull(ccToolchain);
    Preconditions.checkNotNull(fdoContext);
    // Derive a library identifier of the form <package>/lib<name> from the rule's label.
    PathFragment labelName = PathFragment.create(ruleContext.getLabel().getName());
    String libraryIdentifier =
        ruleContext
            .getPackageDirectory()
            .getRelative(labelName.replaceName("lib" + labelName.getBaseName()))
            .getPathString();
    ObjcVariablesExtension extension =
        new ObjcVariablesExtension.Builder()
            .setRuleContext(ruleContext)
            .setObjcProvider(objcProvider)
            .setConfiguration(buildConfiguration)
            .setIntermediateArtifacts(intermediateArtifacts)
            .setFrameworkSearchPath(
                frameworkHeaderSearchPaths(objcProvider, ruleContext, buildConfiguration))
            .setFullyLinkArchive(outputArchive)
            .addVariableCategory(VariableCategory.FULLY_LINK_VARIABLES)
            .build();
    CppLinkAction fullyLinkAction =
        new CppLinkActionBuilder(
                ruleContext,
                ruleContext,
                ruleContext.getLabel(),
                outputArchive,
                ruleContext.getConfiguration(),
                ccToolchain,
                fdoContext,
                getFeatureConfiguration(ruleContext, ccToolchain, buildConfiguration, objcProvider),
                createObjcCppSemantics(
                    objcProvider, /* privateHdrs= */ ImmutableList.of(), /* pchHdr= */ null))
            .setGrepIncludes(CppHelper.getGrepIncludes(ruleContext))
            .setIsStampingEnabled(AnalysisUtils.isStampingEnabled(ruleContext))
            .setTestOrTestOnlyTarget(ruleContext.isTestOnlyTarget() || ruleContext.isTestTarget())
            .addActionInputs(objcProvider.getObjcLibraries())
            .addActionInputs(objcProvider.getCcLibraries())
            .addActionInputs(objcProvider.get(IMPORTED_LIBRARY).toSet())
            .setLinkerFiles(ccToolchain.getLinkerFiles())
            .setLinkType(LinkTargetType.OBJC_FULLY_LINKED_ARCHIVE)
            .setLinkingMode(LinkingMode.STATIC)
            .setLibraryIdentifier(libraryIdentifier)
            .addVariablesExtension(extension)
            .build();
    ruleContext.registerAction(fullyLinkAction);
    return this;
  }

  /**
   * Returns all framework names to pass to the linker using {@code -framework} flags. For a
   * framework in the directory foo/bar.framework, the name is "bar". Each framework is found
   * without using the full path by means of the framework search paths. Search paths are added by
   * {@link#commonLinkAndCompileFlagsForClang(ObjcProvider, ObjcConfiguration, AppleConfiguration)})
   *
   * <p>It's awful that we can't pass the full path to the framework and avoid framework search
   * paths, but this is imposed on us by clang. clang does not support passing the full path to the
   * framework, so Bazel cannot do it either.
   */
  private Set<String> frameworkNames(ObjcProvider provider) {
    // LinkedHashSet keeps insertion order while deduplicating across the three sources.
    Set<String> names = new LinkedHashSet<>();
    Iterables.addAll(names, SdkFramework.names(provider.get(SDK_FRAMEWORK)));
    Iterables.addAll(names, provider.staticFrameworkNames());
    Iterables.addAll(names, provider.dynamicFrameworkNames());
    return names;
  }

  /** Returns libraries that should be passed to the linker. */
  private ImmutableList<String> libraryNames(ObjcProvider objcProvider) {
    ImmutableList.Builder<String> args = new ImmutableList.Builder<>();
    for (String dylib : objcProvider.get(SDK_DYLIB)) {
      if (dylib.startsWith("lib")) {
        // remove lib prefix if it exists which is standard
        // for libraries (libxml.dylib -> -lxml).
        dylib = dylib.substring(3);
      }
      args.add(dylib);
    }
    return args.build();
  }

  /** Returns libraries that should be passed into the linker with {@code -force_load}.
   */
  private ImmutableSet<Artifact> getForceLoadArtifacts(ObjcProvider objcProvider) {
    // Combines libraries explicitly marked force-load with cc libraries flagged always-linked.
    List<Artifact> ccLibraries = objcProvider.getCcLibraries();
    Iterable<Artifact> ccLibrariesToForceLoad =
        Iterables.filter(ccLibraries, ALWAYS_LINKED_CC_LIBRARY);

    return ImmutableSet.<Artifact>builder()
        .addAll(objcProvider.get(FORCE_LOAD_LIBRARY))
        .addAll(ccLibrariesToForceLoad)
        .build();
  }

  /** Returns pruned J2Objc archives for this target. */
  private ImmutableList<Artifact> j2objcPrunedLibraries(ObjcProvider objcProvider) {
    // Maps every J2ObjC library in the provider to its pruned-archive intermediate artifact.
    ImmutableList.Builder<Artifact> j2objcPrunedLibraryBuilder = ImmutableList.builder();
    for (Artifact j2objcLibrary : objcProvider.get(ObjcProvider.J2OBJC_LIBRARY)) {
      j2objcPrunedLibraryBuilder.add(intermediateArtifacts.j2objcPrunedArchive(j2objcLibrary));
    }
    return j2objcPrunedLibraryBuilder.build();
  }

  /** Returns true if this build should strip J2Objc dead code. */
  private boolean stripJ2ObjcDeadCode(J2ObjcEntryClassProvider j2ObjcEntryClassProvider) {
    J2ObjcConfiguration j2objcConfiguration =
        buildConfiguration.getFragment(J2ObjcConfiguration.class);
    // Only perform J2ObjC dead code stripping if flag --j2objc_dead_code_removal is specified and
    // users have specified entry classes.
    return j2objcConfiguration.removeDeadCode()
        && !j2ObjcEntryClassProvider.getEntryClasses().isEmpty();
  }

  /** Registers actions to perform J2Objc dead code removal.
*/ private void registerJ2ObjcDeadCodeRemovalActions( ObjcProvider objcProvider, J2ObjcMappingFileProvider j2ObjcMappingFileProvider, J2ObjcEntryClassProvider j2ObjcEntryClassProvider) { NestedSet<String> entryClasses = j2ObjcEntryClassProvider.getEntryClasses(); Artifact pruner = ruleContext.getPrerequisiteArtifact("$j2objc_dead_code_pruner", Mode.HOST); NestedSet<Artifact> j2ObjcDependencyMappingFiles = j2ObjcMappingFileProvider.getDependencyMappingFiles(); NestedSet<Artifact> j2ObjcHeaderMappingFiles = j2ObjcMappingFileProvider.getHeaderMappingFiles(); NestedSet<Artifact> j2ObjcArchiveSourceMappingFiles = j2ObjcMappingFileProvider.getArchiveSourceMappingFiles(); for (Artifact j2objcArchive : objcProvider.get(ObjcProvider.J2OBJC_LIBRARY)) { Artifact prunedJ2ObjcArchive = intermediateArtifacts.j2objcPrunedArchive(j2objcArchive); Artifact dummyArchive = Iterables.getOnlyElement( ruleContext .getPrerequisite("$dummy_lib", Mode.TARGET, ObjcProvider.SKYLARK_CONSTRUCTOR) .get(LIBRARY)); CustomCommandLine commandLine = CustomCommandLine.builder() .addExecPath("--input_archive", j2objcArchive) .addExecPath("--output_archive", prunedJ2ObjcArchive) .addExecPath("--dummy_archive", dummyArchive) .addExecPath("--xcrunwrapper", xcrunwrapper(ruleContext).getExecutable()) .addExecPaths( "--dependency_mapping_files", VectorArg.join(",").each(j2ObjcDependencyMappingFiles)) .addExecPaths( "--header_mapping_files", VectorArg.join(",").each(j2ObjcHeaderMappingFiles)) .addExecPaths( "--archive_source_mapping_files", VectorArg.join(",").each(j2ObjcArchiveSourceMappingFiles)) .add("--entry_classes") .addAll(VectorArg.join(",").each(entryClasses)) .build(); ruleContext.registerAction( ObjcRuleClasses.spawnAppleEnvActionBuilder( XcodeConfigProvider.fromRuleContext(ruleContext), appleConfiguration.getSingleArchPlatform()) .setMnemonic("DummyPruner") .setExecutable(pruner) .addInput(dummyArchive) .addInput(pruner) .addInput(j2objcArchive) .addInput(xcrunwrapper(ruleContext).getExecutable()) 
.addTransitiveInputs(j2ObjcDependencyMappingFiles) .addTransitiveInputs(j2ObjcHeaderMappingFiles) .addTransitiveInputs(j2ObjcArchiveSourceMappingFiles) .addCommandLine( commandLine, ParamFileInfo.builder(ParameterFile.ParameterFileType.UNQUOTED) .setCharset(ISO_8859_1) .setUseAlways(true) .build()) .addOutput(prunedJ2ObjcArchive) .build(ruleContext)); } } /** Returns archives arising from j2objc transpilation after dead code removal. */ private Iterable<Artifact> computeAndStripPrunedJ2ObjcArchives( J2ObjcEntryClassProvider j2ObjcEntryClassProvider, J2ObjcMappingFileProvider j2ObjcMappingFileProvider, ObjcProvider objcProvider) { Iterable<Artifact> prunedJ2ObjcArchives = ImmutableList.<Artifact>of(); if (stripJ2ObjcDeadCode(j2ObjcEntryClassProvider)) { registerJ2ObjcDeadCodeRemovalActions( objcProvider, j2ObjcMappingFileProvider, j2ObjcEntryClassProvider); prunedJ2ObjcArchives = j2objcPrunedLibraries(objcProvider); } return prunedJ2ObjcArchives; } /** * Returns a nested set of Bazel-built ObjC libraries with all unpruned J2ObjC libraries * substituted with pruned ones. */ private ImmutableList<Artifact> substituteJ2ObjcPrunedLibraries(ObjcProvider objcProvider) { ImmutableList.Builder<Artifact> libraries = new ImmutableList.Builder<>(); Set<Artifact> unprunedJ2ObjcLibs = objcProvider.get(ObjcProvider.J2OBJC_LIBRARY).toSet(); for (Artifact library : objcProvider.getObjcLibraries()) { // If we match an unpruned J2ObjC library, add the pruned version of the J2ObjC static library // instead. 
if (unprunedJ2ObjcLibs.contains(library)) { libraries.add(intermediateArtifacts.j2objcPrunedArchive(library)); } else { libraries.add(library); } } return libraries.build(); } /** Returns the artifact that should be the outcome of this build's link action */ private Artifact getBinaryToLink() { // When compilation_mode=opt and objc_enable_binary_stripping are specified, the unstripped // binary containing debug symbols is generated by the linker, which also needs the debug // symbols for dead-code removal. The binary is also used to generate dSYM bundle if // --apple_generate_dsym is specified. A symbol strip action is later registered to strip // the symbol table from the unstripped binary. return objcConfiguration.shouldStripBinary() ? intermediateArtifacts.unstrippedSingleArchitectureBinary() : intermediateArtifacts.strippedSingleArchitectureBinary(); } private static CommandLine symbolStripCommandLine( ImmutableList<String> extraFlags, Artifact unstrippedArtifact, Artifact strippedArtifact) { return CustomCommandLine.builder() .add(STRIP) .addAll(extraFlags) .addExecPath("-o", strippedArtifact) .addPath(unstrippedArtifact.getExecPath()) .build(); } /** Signals if stripping should include options for dynamic libraries. */ private enum StrippingType { DEFAULT, DYNAMIC_LIB, KERNEL_EXTENSION } /** * Registers an action that uses the 'strip' tool to perform binary stripping on the given binary * subject to the given {@link StrippingType}. */ private void registerBinaryStripAction(Artifact binaryToLink, StrippingType strippingType) { final ImmutableList<String> stripArgs; if (isTestRule) { // For test targets, only debug symbols are stripped off, since /usr/bin/strip is not able // to strip off all symbols in XCTest bundle. stripArgs = ImmutableList.of("-S"); } else { switch (strippingType) { case DYNAMIC_LIB: case KERNEL_EXTENSION: // For dylibs and kexts, must strip only local symbols. 
stripArgs = ImmutableList.of("-x"); break; case DEFAULT: stripArgs = ImmutableList.<String>of(); break; default: throw new IllegalArgumentException("Unsupported stripping type " + strippingType); } } Artifact strippedBinary = intermediateArtifacts.strippedSingleArchitectureBinary(); ruleContext.registerAction( ObjcRuleClasses.spawnAppleEnvActionBuilder( XcodeConfigProvider.fromRuleContext(ruleContext), appleConfiguration.getSingleArchPlatform()) .setMnemonic("ObjcBinarySymbolStrip") .setExecutable(xcrunwrapper(ruleContext)) .addCommandLine(symbolStripCommandLine(stripArgs, binaryToLink, strippedBinary)) .addOutput(strippedBinary) .addInput(binaryToLink) .build(ruleContext)); } private CompilationSupport registerGenerateUmbrellaHeaderAction( Artifact umbrellaHeader, Iterable<Artifact> publicHeaders) { ruleContext.registerAction( new UmbrellaHeaderAction( ruleContext.getActionOwner(), umbrellaHeader, publicHeaders, ImmutableList.<PathFragment>of())); return this; } private Optional<Artifact> getPchFile() { if (!usePch) { return Optional.absent(); } Artifact pchHdr = null; if (ruleContext.attributes().has("pch", BuildType.LABEL)) { pchHdr = ruleContext.getPrerequisiteArtifact("pch", Mode.TARGET); } return Optional.fromNullable(pchHdr); } /** * Registers an action that will generate a clang module map for this target, using the hdrs * attribute of this rule. */ CompilationSupport registerGenerateModuleMapAction(CompilationArtifacts compilationArtifacts) { // TODO(bazel-team): Include textual headers in the module map when Xcode 6 support is // dropped. // TODO(b/32225593): Include private headers in the module map. 
Iterable<Artifact> publicHeaders = attributes.hdrs(); publicHeaders = Iterables.concat(publicHeaders, compilationArtifacts.getAdditionalHdrs()); CppModuleMap moduleMap = intermediateArtifacts.moduleMap(); registerGenerateModuleMapAction(moduleMap, publicHeaders); Optional<Artifact> umbrellaHeader = moduleMap.getUmbrellaHeader(); if (umbrellaHeader.isPresent()) { registerGenerateUmbrellaHeaderAction(umbrellaHeader.get(), publicHeaders); } return this; } /** * Registers an action that will generate a clang module map. * * @param moduleMap the module map to generate * @param publicHeaders the headers that should be directly accessible by dependers * @return this compilation support */ public CompilationSupport registerGenerateModuleMapAction( CppModuleMap moduleMap, Iterable<Artifact> publicHeaders) { publicHeaders = Iterables.filter(publicHeaders, CppFileTypes.MODULE_MAP_HEADER); ruleContext.registerAction( new CppModuleMapAction( ruleContext.getActionOwner(), moduleMap, ImmutableList.<Artifact>of(), publicHeaders, attributes.moduleMapsForDirectDeps(), ImmutableList.<PathFragment>of(), /*compiledModule=*/ true, /*moduleMapHomeIsCwd=*/ false, /* generateSubmodules= */ false, /*externDependencies=*/ true)); return this; } /** * Collector that, given a list of output artifacts, finds and registers coverage notes metadata * for any compilation action. 
*/ private static class ObjcCoverageMetadataCollector extends LocalMetadataCollector { @Override public void collectMetadataArtifacts( Iterable<Artifact> artifacts, AnalysisEnvironment analysisEnvironment, NestedSetBuilder<Artifact> metadataFilesBuilder) { for (Artifact artifact : artifacts) { ActionAnalysisMetadata action = analysisEnvironment.getLocalGeneratingAction(artifact); if (action.getMnemonic().equals("ObjcCompile")) { addOutputs(metadataFilesBuilder, action, ObjcRuleClasses.COVERAGE_NOTES); } } } } private static Iterable<PathFragment> uniqueParentDirectories(Iterable<PathFragment> paths) { ImmutableSet.Builder<PathFragment> parents = new ImmutableSet.Builder<>(); for (PathFragment path : paths) { parents.add(path.getParentDirectory()); } return parents.build(); } /** Holds information about Objective-C compile actions that require header thinning. */ private static final class ObjcHeaderThinningInfo { /** Source file for compile action. */ public final Artifact sourceFile; /** headers_list file for compile action. */ public final Artifact headersListFile; /** Command line arguments for compile action execution. */ public final ImmutableList<String> arguments; public ObjcHeaderThinningInfo( Artifact sourceFile, Artifact headersListFile, ImmutableList<String> arguments) { this.sourceFile = Preconditions.checkNotNull(sourceFile); this.headersListFile = Preconditions.checkNotNull(headersListFile); this.arguments = Preconditions.checkNotNull(arguments); } public ObjcHeaderThinningInfo( Artifact sourceFile, Artifact headersListFile, Iterable<String> arguments) { this(sourceFile, headersListFile, ImmutableList.copyOf(arguments)); } } /** * Returns true when ObjC header thinning is enabled via configuration and an a valid * header_scanner executable target is provided. 
*/ private boolean isHeaderThinningEnabled() { if (objcConfiguration.useExperimentalHeaderThinning() && ruleContext.isAttrDefined(ObjcRuleClasses.HEADER_SCANNER_ATTRIBUTE, BuildType.LABEL)) { FilesToRunProvider tool = getHeaderThinningToolExecutable(); // Additional here to ensure that an Executable Artifact exists to disable where the tool // is an empty filegroup return tool != null && tool.getExecutable() != null; } return false; } private FilesToRunProvider getHeaderThinningToolExecutable() { return ruleContext .getPrerequisite(ObjcRuleClasses.HEADER_SCANNER_ATTRIBUTE, Mode.HOST) .getProvider(FilesToRunProvider.class); } private void registerHeaderScanningActions( CcCompilationOutputs ccCompilationOutputs, ObjcProvider objcProvider, CompilationArtifacts compilationArtifacts) throws RuleErrorException { // PIC is not used for Obj-C builds, if that changes this method will need to change if (includeProcessingType != IncludeProcessingType.HEADER_THINNING || ccCompilationOutputs.getObjectFiles(false).isEmpty()) { return; } try { ImmutableList.Builder<ObjcHeaderThinningInfo> headerThinningInfos = ImmutableList.builder(); AnalysisEnvironment analysisEnvironment = ruleContext.getAnalysisEnvironment(); for (Artifact objectFile : ccCompilationOutputs.getObjectFiles(false)) { ActionAnalysisMetadata generatingAction = analysisEnvironment.getLocalGeneratingAction(objectFile); if (generatingAction instanceof CppCompileAction) { CppCompileAction action = (CppCompileAction) generatingAction; Artifact sourceFile = action.getSourceFile(); if (!sourceFile.isTreeArtifact() && SOURCES_FOR_HEADER_THINNING.matches(sourceFile.getFilename())) { headerThinningInfos.add( new ObjcHeaderThinningInfo( sourceFile, intermediateArtifacts.headersListFile(objectFile), action.getCompilerOptions())); } } } registerHeaderScanningActions( headerThinningInfos.build(), objcProvider, compilationArtifacts); } catch (CommandLineExpansionException e) { throw 
ruleContext.throwWithRuleError(e.getMessage()); } } /** * Creates and registers ObjcHeaderScanning {@link SpawnAction}. Groups all the actions by their * compilation command line arguments and creates a ObjcHeaderScanning action for each unique one. * * <p>The number of sources to scan per actions are bounded so that targets with a high number of * sources are not penalized. A large number of sources may require a lot of processing * particularly when the headers required for different sources vary greatly and the caching * mechanism in the tool is largely useless. In these instances these actions would benefit by * being distributed so they don't contribute to the critical path. The partition size is * configurable so that it can be tuned. */ private void registerHeaderScanningActions( ImmutableList<ObjcHeaderThinningInfo> headerThinningInfo, ObjcProvider objcProvider, CompilationArtifacts compilationArtifacts) { if (headerThinningInfo.isEmpty()) { return; } ListMultimap<ImmutableList<String>, ObjcHeaderThinningInfo> objcHeaderThinningInfoByCommandLine = groupActionsByCommandLine(headerThinningInfo); // Register a header scanning spawn action for each unique set of command line arguments for (ImmutableList<String> args : objcHeaderThinningInfoByCommandLine.keySet()) { // As infos is in insertion order we should reliably get the same sublists below for (List<ObjcHeaderThinningInfo> partition : Lists.partition( objcHeaderThinningInfoByCommandLine.get(args), objcConfiguration.objcHeaderThinningPartitionSize())) { registerHeaderScanningAction(objcProvider, compilationArtifacts, args, partition); } } } private void registerHeaderScanningAction( ObjcProvider objcProvider, CompilationArtifacts compilationArtifacts, ImmutableList<String> args, List<ObjcHeaderThinningInfo> infos) { SpawnAction.Builder builder = new SpawnAction.Builder() .setMnemonic("ObjcHeaderScanning") .setExecutable(getHeaderThinningToolExecutable()) .addInputs( ruleContext 
.getPrerequisiteArtifacts(ObjcRuleClasses.APPLE_SDK_ATTRIBUTE, Mode.TARGET) .list()); CustomCommandLine.Builder cmdLine = CustomCommandLine.builder() .add("--arch", appleConfiguration.getSingleArchitecture().toLowerCase()) .add("--platform", appleConfiguration.getSingleArchPlatform().getLowerCaseNameInPlist()) .add( "--sdk_version", XcodeConfig.getXcodeConfigProvider(ruleContext) .getSdkVersionForPlatform(appleConfiguration.getSingleArchPlatform()) .toStringWithMinimumComponents(2)) .add( "--xcode_version", XcodeConfig.getXcodeConfigProvider(ruleContext) .getXcodeVersion() .toStringWithMinimumComponents(2)) .add("--"); for (ObjcHeaderThinningInfo info : infos) { cmdLine.addFormatted( "%s:%s", info.sourceFile.getExecPath(), info.headersListFile.getExecPath()); builder.addInput(info.sourceFile).addOutput(info.headersListFile); } ruleContext.registerAction( builder .addCommandLine(cmdLine.add("--").addAll(args).build()) .addInputs(compilationArtifacts.getPrivateHdrs()) .addTransitiveInputs(attributes.hdrs()) .addTransitiveInputs(objcProvider.get(ObjcProvider.HEADER)) .addInputs(getPchFile().asSet()) .build(ruleContext)); } /** * Groups {@link ObjcHeaderThinningInfo} objects based on the command line arguments of the * ObjcCompile action. * * <p>Grouping by command line arguments allows {@link * #registerHeaderScanningActions(ImmutableList, ObjcProvider, CompilationArtifacts)} to create a * {@link SpawnAction} based on the compiler command line flags that may cause a difference in * behaviour by the preprocessor. Some of the command line arguments must be filtered out as they * change with every source {@link Artifact}; for example the object file (-o) and dotd filenames * (-MF). These arguments are known not to change the preprocessor behaviour. 
* * @param headerThinningInfos information for compile actions that require header thinning * @return values in {@code headerThinningInfos} grouped by compile action command line arguments */ private static ListMultimap<ImmutableList<String>, ObjcHeaderThinningInfo> groupActionsByCommandLine(ImmutableList<ObjcHeaderThinningInfo> headerThinningInfos) { // Maintain insertion order so that iteration in #registerHeaderScanningActions is deterministic ListMultimap<ImmutableList<String>, ObjcHeaderThinningInfo> objcHeaderThinningInfoByCommandLine = ArrayListMultimap.create(); for (ObjcHeaderThinningInfo info : headerThinningInfos) { ImmutableList.Builder<String> filteredArgumentsBuilder = ImmutableList.builder(); List<String> arguments = info.arguments; for (int i = 0; i < arguments.size(); ++i) { String arg = arguments.get(i); if (arg.equals("-MF") || arg.equals("-o") || arg.equals("-c")) { ++i; } else if (!arg.equals("-MD")) { filteredArgumentsBuilder.add(arg); } } objcHeaderThinningInfoByCommandLine.put(filteredArgumentsBuilder.build(), info); } return objcHeaderThinningInfoByCommandLine; } public static Optional<Artifact> getCustomModuleMap(RuleContext ruleContext) { if (ruleContext.attributes().has("module_map", BuildType.LABEL)) { return Optional.fromNullable(ruleContext.getPrerequisiteArtifact("module_map", Mode.TARGET)); } return Optional.absent(); } }
Delete unused arguments to framework search path functions They are left over from the framework cleanup. RELNOTES: None PiperOrigin-RevId: 279752015
src/main/java/com/google/devtools/build/lib/rules/objc/CompilationSupport.java
Delete unused arguments to framework search path functions
<ide><path>rc/main/java/com/google/devtools/build/lib/rules/objc/CompilationSupport.java <ide> .getCoptsForCompilationMode()) <ide> .addAll(extraCompileArgs) <ide> .build()) <del> .addFrameworkIncludeDirs( <del> frameworkHeaderSearchPathFragments(objcProvider, ruleContext, buildConfiguration)) <add> .addFrameworkIncludeDirs(frameworkHeaderSearchPathFragments(objcProvider)) <ide> .addIncludeDirs(priorityHeaders) <ide> .addIncludeDirs(objcProvider.get(INCLUDE)) <ide> .addSystemIncludeDirs(objcProvider.get(INCLUDE_SYSTEM)) <ide> } <ide> <ide> /** Returns a list of framework header search path fragments. */ <del> static ImmutableList<PathFragment> frameworkHeaderSearchPathFragments( <del> ObjcProvider provider, RuleContext ruleContext, BuildConfiguration buildConfiguration) <add> static ImmutableList<PathFragment> frameworkHeaderSearchPathFragments(ObjcProvider provider) <ide> throws InterruptedException { <ide> ImmutableList.Builder<PathFragment> searchPaths = new ImmutableList.Builder<>(); <ide> return searchPaths <ide> } <ide> <ide> /** Returns a list of framework header search paths. */ <del> static ImmutableList<String> frameworkHeaderSearchPaths( <del> ObjcProvider provider, RuleContext ruleContext, BuildConfiguration buildConfiguration) <add> static ImmutableList<String> frameworkHeaderSearchPaths(ObjcProvider provider) <ide> throws InterruptedException { <ide> ImmutableList.Builder<String> searchPaths = new ImmutableList.Builder<>(); <ide> return searchPaths <ide> .addAll( <ide> Iterables.transform( <del> frameworkHeaderSearchPathFragments(provider, ruleContext, buildConfiguration), <del> PathFragment::getSafePathString)) <add> frameworkHeaderSearchPathFragments(provider), PathFragment::getSafePathString)) <ide> .build(); <ide> } <ide> <ide> /** Returns a list of framework library search paths. 
*/ <del> static ImmutableList<String> frameworkLibrarySearchPaths( <del> ObjcProvider provider, RuleContext ruleContext, BuildConfiguration buildConfiguration) <add> static ImmutableList<String> frameworkLibrarySearchPaths(ObjcProvider provider) <ide> throws InterruptedException { <ide> ImmutableList.Builder<String> searchPaths = new ImmutableList.Builder<>(); <ide> return searchPaths <ide> .setCompilationArtifacts(compilationArtifacts) <ide> .setIntermediateArtifacts(intermediateArtifacts) <ide> .setConfiguration(buildConfiguration) <del> .setFrameworkSearchPath( <del> frameworkHeaderSearchPaths(objcProvider, ruleContext, buildConfiguration)); <add> .setFrameworkSearchPath(frameworkHeaderSearchPaths(objcProvider)); <ide> <ide> Pair<CcCompilationOutputs, ImmutableMap<String, NestedSet<Artifact>>> compilationInfo; <ide> <ide> .setConfiguration(buildConfiguration) <ide> .setIntermediateArtifacts(intermediateArtifacts) <ide> .setFrameworkNames(frameworkNames(objcProvider)) <del> .setFrameworkSearchPath( <del> frameworkLibrarySearchPaths(objcProvider, ruleContext, buildConfiguration)) <add> .setFrameworkSearchPath(frameworkLibrarySearchPaths(objcProvider)) <ide> .setLibraryNames(libraryNames(objcProvider)) <ide> .setForceLoadArtifacts(getForceLoadArtifacts(objcProvider)) <ide> .setAttributeLinkopts(attributes.linkopts()) <ide> .setObjcProvider(objcProvider) <ide> .setConfiguration(buildConfiguration) <ide> .setIntermediateArtifacts(intermediateArtifacts) <del> .setFrameworkSearchPath( <del> frameworkHeaderSearchPaths(objcProvider, ruleContext, buildConfiguration)) <add> .setFrameworkSearchPath(frameworkHeaderSearchPaths(objcProvider)) <ide> .setFullyLinkArchive(outputArchive) <ide> .addVariableCategory(VariableCategory.FULLY_LINK_VARIABLES) <ide> .build();
Java
apache-2.0
679eb44465fe9fe88332ab4003df803fddc075b7
0
kubatatami/JudoNetworking
package com.github.kubatatami.judonetworking.callbacks; import java.util.Map; import java.util.WeakHashMap; public class CallbackCache { private final int hash; private AsyncResultCallback callback; private static final Map<Integer, AsyncResultCallback> itemCache = new WeakHashMap<>(); public CallbackCache(Object item, AsyncResultCallback callback) { this.callback = callback; this.hash = item.hashCode(); cancelRequest(hash); itemCache.put(hash, callback); } public boolean consume() { if (validCallback()) { itemCache.remove(hash); return true; } return false; } public boolean cancel() { if (itemCache.containsKey(hash) && !validCallback()) { callback.getAsyncResult().cancel(); return true; } return false; } public static void cancelRequest(Object item) { int itemHash = item.hashCode(); if (itemCache.containsKey(itemHash)) { if (itemCache.get(itemHash).getAsyncResult() != null) { itemCache.get(itemHash).getAsyncResult().cancel(); } itemCache.remove(itemHash); } } private boolean validCallback() { return callback.equals(itemCache.get(hash)); } }
base/src/main/java/com/github/kubatatami/judonetworking/callbacks/CallbackCache.java
package com.github.kubatatami.judonetworking.callbacks; import java.util.Map; import java.util.WeakHashMap; public class CallbackCache { private final int hash; private AsyncResultCallback callback; private static final Map<Integer, AsyncResultCallback> itemCache = new WeakHashMap<>(); public CallbackCache(Object item, AsyncResultCallback callback) { this.callback = callback; this.hash = item.hashCode(); cancelRequest(hash); itemCache.put(hash, callback); } public boolean consume() { if (callback.equals(itemCache.get(hash))) { itemCache.remove(hash); return true; } return false; } public boolean cancel() { if (!callback.equals(itemCache.get(hash))) { callback.getAsyncResult().cancel(); return true; } return false; } public static void cancelRequest(Object item) { int itemHash = item.hashCode(); if (itemCache.containsKey(itemHash)) { if (itemCache.get(itemHash).getAsyncResult() != null) { itemCache.get(itemHash).getAsyncResult().cancel(); } itemCache.remove(itemHash); } } }
bug fix
base/src/main/java/com/github/kubatatami/judonetworking/callbacks/CallbackCache.java
bug fix
<ide><path>ase/src/main/java/com/github/kubatatami/judonetworking/callbacks/CallbackCache.java <ide> } <ide> <ide> public boolean consume() { <del> if (callback.equals(itemCache.get(hash))) { <add> if (validCallback()) { <ide> itemCache.remove(hash); <ide> return true; <ide> } <ide> } <ide> <ide> public boolean cancel() { <del> if (!callback.equals(itemCache.get(hash))) { <add> if (itemCache.containsKey(hash) && !validCallback()) { <ide> callback.getAsyncResult().cancel(); <ide> return true; <ide> } <ide> itemCache.remove(itemHash); <ide> } <ide> } <add> <add> private boolean validCallback() { <add> return callback.equals(itemCache.get(hash)); <add> } <ide> }
Java
agpl-3.0
fd58992219846f2e8987b2ce520e9d82633f1b81
0
imCodePartnerAB/imcms,imCodePartnerAB/imcms,imCodePartnerAB/imcms
package imcode.server.document.index.service.impl; import com.imcode.imcms.api.DocumentLanguage; import com.imcode.imcms.api.DocumentLanguages; import com.imcode.imcms.mapping.DocumentMapper; import imcode.server.document.DocumentDomainObject; import imcode.server.document.index.DocumentIndex; import lombok.SneakyThrows; import org.apache.log4j.Logger; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.util.DateUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import java.io.IOException; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.function.Consumer; import java.util.stream.Collectors; /** * Document index service low level operations. * <p> * An instance of this class is thread save. 
*/ // todo: document search might return doc which is not present in db (deleted) - return stub instead @Component public class DocumentIndexServiceOps { private static final Logger logger = Logger.getLogger(DocumentIndexServiceOps.class); private final DocumentMapper documentMapper; private final DocumentIndexer documentIndexer; private final DocumentLanguages documentLanguages; @Autowired public DocumentIndexServiceOps(DocumentMapper documentMapper, DocumentIndexer documentIndexer, DocumentLanguages documentLanguages) { this.documentMapper = documentMapper; this.documentIndexer = documentIndexer; this.documentLanguages = documentLanguages; } private Collection<SolrInputDocument> mkSolrInputDocs(int docId) { return mkSolrInputDocs(docId, documentLanguages.getAll()); } // todo: rewrite using DocumentDTO private Collection<SolrInputDocument> mkSolrInputDocs(int docId, Collection<DocumentLanguage> languages) { Collection<SolrInputDocument> solrInputDocs = languages.stream() .map(language -> (DocumentDomainObject) documentMapper.getDefaultDocument(docId, language)) .filter(Objects::nonNull) .map(doc -> { try { return documentIndexer.index(doc); } catch (Exception e) { logger.error( String.format("Can't create SolrInputDocument from doc %d-%d-%s", doc.getId(), doc.getVersionNo(), doc.getLanguage().getCode()), e); return null; } }) .filter(Objects::nonNull) .collect(Collectors.toList()); if (logger.isTraceEnabled()) { logger.trace( String.format("Created %d solrInputDoc(s) with docId: %d and language(s): %s.", solrInputDocs.size(), docId, languages) ); } return solrInputDocs; } private String mkSolrDocsDeleteQuery(int docId) { return String.format("%s:%d", DocumentIndex.FIELD__META_ID, docId); } public QueryResponse query(SolrServer solrServer, SolrQuery solrQuery) throws SolrServerException { return solrServer.query(solrQuery); } public void addDocsToIndex(SolrServer solrServer, int docId) throws SolrServerException, IOException { Collection<SolrInputDocument> 
solrInputDocs = mkSolrInputDocs(docId); if (!solrInputDocs.isEmpty()) { solrServer.add(solrInputDocs); solrServer.commit(false, false, true); logger.info(String.format("Added %d solrInputDoc(s) with docId %d into the index.", solrInputDocs.size(), docId)); } } public void deleteDocsFromIndex(SolrServer solrServer, int docId) throws SolrServerException, IOException { String query = mkSolrDocsDeleteQuery(docId); solrServer.deleteByQuery(query); solrServer.commit(false, false, true); logger.info(String.format("Removed document with docId %d from index.", docId)); } public void rebuildIndex(SolrServer solrServer) { rebuildIndex(solrServer, indexRebuildProgress -> { }); } @SneakyThrows private void rebuildIndex(SolrServer solrServer, Consumer<IndexRebuildProgress> progressCallback) { logger.debug("Rebuilding index."); final List<Integer> ids = documentMapper.getAllDocumentIds(); final List<DocumentLanguage> languages = documentLanguages.getAll(); final int docsCount = ids.size(); int docNo = 0; final Date rebuildStartDt = new Date(); final long rebuildStartTime = rebuildStartDt.getTime(); progressCallback.accept(new IndexRebuildProgress(rebuildStartTime, rebuildStartTime, docsCount, docNo)); for (int id : ids) { if (Thread.interrupted()) { solrServer.rollback(); throw new InterruptedException(); } Collection<SolrInputDocument> solrInputDocs = mkSolrInputDocs(id, languages); if (!solrInputDocs.isEmpty()) { solrServer.add(solrInputDocs); logger.debug(String.format("Added input docs [%s] to index.", solrInputDocs)); } docNo += 1; progressCallback.accept(new IndexRebuildProgress(rebuildStartTime, System.currentTimeMillis(), docsCount, docNo)); } logger.debug("Deleting old documents from index."); solrServer.deleteByQuery(String.format("timestamp:{* TO %s}", DateUtil.getThreadLocalDateFormat().format(rebuildStartDt))); solrServer.commit(); logger.debug("Index rebuild is complete."); } }
src/main/java/imcode/server/document/index/service/impl/DocumentIndexServiceOps.java
package imcode.server.document.index.service.impl; import com.imcode.imcms.api.DocumentLanguage; import com.imcode.imcms.api.DocumentLanguages; import com.imcode.imcms.mapping.DocumentMapper; import imcode.server.document.DocumentDomainObject; import imcode.server.document.index.DocumentIndex; import lombok.SneakyThrows; import org.apache.log4j.Logger; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.util.DateUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.function.Consumer; import java.util.stream.Collectors; /** * Document index service low level operations. * <p> * An instance of this class is thread save. 
*/ // todo: document search might return doc which is not present in db (deleted) - return stub instead @Component public class DocumentIndexServiceOps { private static final Logger logger = Logger.getLogger(DocumentIndexServiceOps.class); private final DocumentMapper documentMapper; private final DocumentIndexer documentIndexer; private final DocumentLanguages documentLanguages; @Autowired public DocumentIndexServiceOps(DocumentMapper documentMapper, DocumentIndexer documentIndexer, DocumentLanguages documentLanguages) { this.documentMapper = documentMapper; this.documentIndexer = documentIndexer; this.documentLanguages = documentLanguages; } private Collection<SolrInputDocument> mkSolrInputDocs(int docId) { return mkSolrInputDocs(docId, documentLanguages.getAll()); } // todo: rewrite using DocumentDTO private Collection<SolrInputDocument> mkSolrInputDocs(int docId, Collection<DocumentLanguage> languages) { Collection<SolrInputDocument> solrInputDocs = languages.stream() .map(language -> (DocumentDomainObject) documentMapper.getDefaultDocument(docId, language)) .filter(Objects::nonNull) .map(doc -> { try { return documentIndexer.index(doc); } catch (Exception e) { logger.error( String.format("Can't create SolrInputDocument from doc %d-%d-%s", doc.getId(), doc.getVersionNo(), doc.getLanguage().getCode()), e); return null; } }) .filter(Objects::nonNull) .collect(Collectors.toList()); if (logger.isTraceEnabled()) { logger.trace( String.format("Created %d solrInputDoc(s) with docId: %d and language(s): %s.", solrInputDocs.size(), docId, languages) ); } return solrInputDocs; } private String mkSolrDocsDeleteQuery(int docId) { return String.format("%s:%d", DocumentIndex.FIELD__META_ID, docId); } public QueryResponse query(SolrServer solrServer, SolrQuery solrQuery) throws SolrServerException { if (logger.isDebugEnabled()) { try { String decodedSolrQuery = URLDecoder.decode(solrQuery.toString(), StandardCharsets.UTF_8.name()); logger.debug(String.format("Searching using 
SOLr query: %s.", decodedSolrQuery)); } catch (UnsupportedEncodingException e) { // should never happen logger.fatal("Solr query can not be decoded", e); throw new AssertionError(e); } } return solrServer.query(solrQuery); } public void addDocsToIndex(SolrServer solrServer, int docId) throws SolrServerException, IOException { Collection<SolrInputDocument> solrInputDocs = mkSolrInputDocs(docId); if (!solrInputDocs.isEmpty()) { solrServer.add(solrInputDocs); solrServer.commit(false, false, true); logger.info(String.format("Added %d solrInputDoc(s) with docId %d into the index.", solrInputDocs.size(), docId)); } } public void deleteDocsFromIndex(SolrServer solrServer, int docId) throws SolrServerException, IOException { String query = mkSolrDocsDeleteQuery(docId); solrServer.deleteByQuery(query); solrServer.commit(false, false, true); logger.info(String.format("Removed document with docId %d from index.", docId)); } public void rebuildIndex(SolrServer solrServer) { rebuildIndex(solrServer, indexRebuildProgress -> { }); } @SneakyThrows private void rebuildIndex(SolrServer solrServer, Consumer<IndexRebuildProgress> progressCallback) { logger.debug("Rebuilding index."); final List<Integer> ids = documentMapper.getAllDocumentIds(); final List<DocumentLanguage> languages = documentLanguages.getAll(); final int docsCount = ids.size(); int docNo = 0; final Date rebuildStartDt = new Date(); final long rebuildStartTime = rebuildStartDt.getTime(); progressCallback.accept(new IndexRebuildProgress(rebuildStartTime, rebuildStartTime, docsCount, docNo)); for (int id : ids) { if (Thread.interrupted()) { solrServer.rollback(); throw new InterruptedException(); } Collection<SolrInputDocument> solrInputDocs = mkSolrInputDocs(id, languages); if (!solrInputDocs.isEmpty()) { solrServer.add(solrInputDocs); logger.debug(String.format("Added input docs [%s] to index.", solrInputDocs)); } docNo += 1; progressCallback.accept(new IndexRebuildProgress(rebuildStartTime, 
System.currentTimeMillis(), docsCount, docNo)); } logger.debug("Deleting old documents from index."); solrServer.deleteByQuery(String.format("timestamp:{* TO %s}", DateUtil.getThreadLocalDateFormat().format(rebuildStartDt))); solrServer.commit(); logger.debug("Index rebuild is complete."); } }
IMCMS-233 - Apply new UI to the admin panel and editors: - Clean up.
src/main/java/imcode/server/document/index/service/impl/DocumentIndexServiceOps.java
IMCMS-233 - Apply new UI to the admin panel and editors: - Clean up.
<ide><path>rc/main/java/imcode/server/document/index/service/impl/DocumentIndexServiceOps.java <ide> import org.springframework.stereotype.Component; <ide> <ide> import java.io.IOException; <del>import java.io.UnsupportedEncodingException; <del>import java.net.URLDecoder; <del>import java.nio.charset.StandardCharsets; <ide> import java.util.Collection; <ide> import java.util.Date; <ide> import java.util.List; <ide> } <ide> <ide> public QueryResponse query(SolrServer solrServer, SolrQuery solrQuery) throws SolrServerException { <del> if (logger.isDebugEnabled()) { <del> try { <del> String decodedSolrQuery = URLDecoder.decode(solrQuery.toString(), StandardCharsets.UTF_8.name()); <del> logger.debug(String.format("Searching using SOLr query: %s.", decodedSolrQuery)); <del> } catch (UnsupportedEncodingException e) { <del> // should never happen <del> logger.fatal("Solr query can not be decoded", e); <del> throw new AssertionError(e); <del> } <del> } <del> <ide> return solrServer.query(solrQuery); <ide> } <ide>
Java
mit
error: pathspec 'java/src/main/algorithm/leetcode/CombinationSum.java' did not match any file(s) known to git
f9319ba18fcfab70b9b85ed0e3815eabbc9fa5d5
1
chilejiang1024/codes,chilejiang1024/codes,chilejiang1024/codes,chilejiang1024/codes,chilejiang1024/codes,chilejiang1024/codes
package main.algorithm.leetcode; import org.junit.Test; import java.util.*; import java.util.stream.Collectors; /** * Title : main.algorithm.leetcode <br> * Description : * * * @author chile * @version 1.0 * @date 2019/8/28 16:43 */ public class CombinationSum { class Solution { public List<List<Integer>> combinationSum(int[] candidates, int target) { List<List<Integer>> result = new ArrayList<>(); if (candidates.length == 0) { return result; } quickSort(candidates, 0, candidates.length - 1); findCombinations(candidates, target, result, new ArrayList<>()); Set<String> sums = new HashSet<>(); result = result.stream().filter(l -> { l.sort(Integer::compareTo); String combine = Arrays.toString(l.toArray()); if (sums.contains(combine)) { return false; } else { sums.add(combine); return true; } }).collect(Collectors.toCollection(ArrayList::new)); return result; } private void findCombinations(int[] candidates, int target, List<List<Integer>> result, List<Integer> nums) { int sum = nums.stream().reduce(Integer::sum).orElse(0); if (sum == target) { result.add(nums); return; } if (sum > target) { return; } for (int c : candidates) { List<Integer> nextNums = new ArrayList<>(nums); nextNums.add(c); findCombinations(candidates, target, result, nextNums); } } public void quickSort(int[] nums, int head, int tail) { if (head >= tail) { return; } int l = head, r = tail, pivot = (l + r) / 2; while (l < r) { while (nums[l] < nums[pivot]) { l++; } while (nums[r] >= nums[pivot]) { r--; if (r == l) { break; } } if (l < r) { int t = nums[l]; nums[l] = nums[r]; nums[r] = t; l++; r--; } } quickSort(nums, head, l - 1); quickSort(nums, l + 1, tail); } } @Test public void test1() { int[] nums = { 1, 2, 4, 3, 3 }; new Solution().quickSort(nums, 0, nums.length - 1); System.out.println(Arrays.toString(nums)); } @Test public void test2() { int[] nums = { 2, 3, 6, 7 }; List<List<Integer>> result = new Solution().combinationSum(nums, 7); result.forEach(l -> { l.forEach(System.out::print); 
System.out.println(); }); } @Test public void test3() { int[] nums = { 1, 2 }; List<List<Integer>> result = new Solution().combinationSum(nums, 1); result.forEach(l -> { l.forEach(System.out::print); System.out.println(); }); } }
java/src/main/algorithm/leetcode/CombinationSum.java
leetcode: combination sum: not a good solution
java/src/main/algorithm/leetcode/CombinationSum.java
leetcode: combination sum: not a good solution
<ide><path>ava/src/main/algorithm/leetcode/CombinationSum.java <add>package main.algorithm.leetcode; <add> <add>import org.junit.Test; <add> <add>import java.util.*; <add>import java.util.stream.Collectors; <add> <add>/** <add> * Title : main.algorithm.leetcode <br> <add> * Description : <add> * <add> * <add> * @author chile <add> * @version 1.0 <add> * @date 2019/8/28 16:43 <add> */ <add>public class CombinationSum { <add> <add> class Solution { <add> public List<List<Integer>> combinationSum(int[] candidates, int target) { <add> List<List<Integer>> result = new ArrayList<>(); <add> <add> if (candidates.length == 0) { <add> return result; <add> } <add> <add> quickSort(candidates, 0, candidates.length - 1); <add> <add> findCombinations(candidates, target, result, new ArrayList<>()); <add> <add> Set<String> sums = new HashSet<>(); <add> result = result.stream().filter(l -> { <add> l.sort(Integer::compareTo); <add> String combine = Arrays.toString(l.toArray()); <add> if (sums.contains(combine)) { <add> return false; <add> } else { <add> sums.add(combine); <add> return true; <add> } <add> }).collect(Collectors.toCollection(ArrayList::new)); <add> <add> return result; <add> } <add> <add> private void findCombinations(int[] candidates, int target, List<List<Integer>> result, List<Integer> nums) { <add> int sum = nums.stream().reduce(Integer::sum).orElse(0); <add> <add> if (sum == target) { <add> result.add(nums); <add> return; <add> } <add> <add> if (sum > target) { <add> return; <add> } <add> <add> for (int c : candidates) { <add> List<Integer> nextNums = new ArrayList<>(nums); <add> nextNums.add(c); <add> findCombinations(candidates, target, result, nextNums); <add> } <add> <add> } <add> <add> public void quickSort(int[] nums, int head, int tail) { <add> if (head >= tail) { <add> return; <add> } <add> <add> int l = head, r = tail, pivot = (l + r) / 2; <add> while (l < r) { <add> while (nums[l] < nums[pivot]) { <add> l++; <add> } <add> while (nums[r] >= nums[pivot]) { 
<add> r--; <add> if (r == l) { <add> break; <add> } <add> } <add> if (l < r) { <add> int t = nums[l]; <add> nums[l] = nums[r]; <add> nums[r] = t; <add> l++; <add> r--; <add> } <add> } <add> quickSort(nums, head, l - 1); <add> quickSort(nums, l + 1, tail); <add> } <add> } <add> <add> @Test <add> public void test1() { <add> int[] nums = { 1, 2, 4, 3, 3 }; <add> new Solution().quickSort(nums, 0, nums.length - 1); <add> System.out.println(Arrays.toString(nums)); <add> } <add> <add> @Test <add> public void test2() { <add> int[] nums = { 2, 3, 6, 7 }; <add> List<List<Integer>> result = new Solution().combinationSum(nums, 7); <add> result.forEach(l -> { <add> l.forEach(System.out::print); <add> System.out.println(); <add> }); <add> } <add> <add> @Test <add> public void test3() { <add> int[] nums = { 1, 2 }; <add> List<List<Integer>> result = new Solution().combinationSum(nums, 1); <add> result.forEach(l -> { <add> l.forEach(System.out::print); <add> System.out.println(); <add> }); <add> } <add>}
Java
mit
ad4f0bb1f607204641fed1562b0617b5bbc93f7d
0
sake/bouncycastle-java
package org.bouncycastle.crypto.encodings; import org.bouncycastle.crypto.AsymmetricBlockCipher; import org.bouncycastle.crypto.CipherParameters; import org.bouncycastle.crypto.Digest; import org.bouncycastle.crypto.InvalidCipherTextException; import org.bouncycastle.crypto.digests.SHA1Digest; import org.bouncycastle.crypto.params.ParametersWithRandom; import java.security.SecureRandom; /** * Optimal Asymmetric Encryption Padding (OAEP) - see PKCS 1 V 2. */ public class OAEPEncoding implements AsymmetricBlockCipher { private byte[] defHash; private Digest hash; private Digest mgf1Hash; private AsymmetricBlockCipher engine; private SecureRandom random; private boolean forEncryption; public OAEPEncoding( AsymmetricBlockCipher cipher) { this(cipher, new SHA1Digest(), null); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash) { this(cipher, hash, null); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash, byte[] encodingParams) { this(cipher, hash, hash, encodingParams); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash, Digest mgf1Hash, byte[] encodingParams) { this.engine = cipher; this.hash = hash; this.mgf1Hash = mgf1Hash; this.defHash = new byte[hash.getDigestSize()]; if (encodingParams != null) { hash.update(encodingParams, 0, encodingParams.length); } hash.doFinal(defHash, 0); } public AsymmetricBlockCipher getUnderlyingCipher() { return engine; } public void init( boolean forEncryption, CipherParameters param) { if (param instanceof ParametersWithRandom) { ParametersWithRandom rParam = (ParametersWithRandom)param; this.random = rParam.getRandom(); } else { this.random = new SecureRandom(); } engine.init(forEncryption, param); this.forEncryption = forEncryption; } public int getInputBlockSize() { int baseBlockSize = engine.getInputBlockSize(); if (forEncryption) { return baseBlockSize - 1 - 2 * defHash.length; } else { return baseBlockSize; } } public int getOutputBlockSize() { int baseBlockSize = 
engine.getOutputBlockSize(); if (forEncryption) { return baseBlockSize; } else { return baseBlockSize - 1 - 2 * defHash.length; } } public byte[] processBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { if (forEncryption) { return encodeBlock(in, inOff, inLen); } else { return decodeBlock(in, inOff, inLen); } } public byte[] encodeBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { byte[] block = new byte[getInputBlockSize() + 1 + 2 * defHash.length]; // // copy in the message // System.arraycopy(in, inOff, block, block.length - inLen, inLen); // // add sentinel // block[block.length - inLen - 1] = 0x01; // // as the block is already zeroed - there's no need to add PS (the >= 0 pad of 0) // // // add the hash of the encoding params. // System.arraycopy(defHash, 0, block, defHash.length, defHash.length); // // generate the seed. // byte[] seed = new byte[defHash.length]; random.nextBytes(seed); // // mask the message block. // byte[] mask = maskGeneratorFunction1(seed, 0, seed.length, block.length - defHash.length); for (int i = defHash.length; i != block.length; i++) { block[i] ^= mask[i - defHash.length]; } // // add in the seed // System.arraycopy(seed, 0, block, 0, defHash.length); // // mask the seed. // mask = maskGeneratorFunction1( block, defHash.length, block.length - defHash.length, defHash.length); for (int i = 0; i != defHash.length; i++) { block[i] ^= mask[i]; } return engine.processBlock(block, 0, block.length); } /** * @exception InvalidCipherTextException if the decrypted block turns out to * be badly formatted. */ public byte[] decodeBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { byte[] data = engine.processBlock(in, inOff, inLen); byte[] block; // // as we may have zeros in our leading bytes for the block we produced // on encryption, we need to make sure our decrypted block comes back // the same size. 
// if (data.length < engine.getOutputBlockSize()) { block = new byte[engine.getOutputBlockSize()]; System.arraycopy(data, 0, block, block.length - data.length, data.length); } else { block = data; } if (block.length < (2 * defHash.length) + 1) { throw new InvalidCipherTextException("data too short"); } // // unmask the seed. // byte[] mask = maskGeneratorFunction1( block, defHash.length, block.length - defHash.length, defHash.length); for (int i = 0; i != defHash.length; i++) { block[i] ^= mask[i]; } // // unmask the message block. // mask = maskGeneratorFunction1(block, 0, defHash.length, block.length - defHash.length); for (int i = defHash.length; i != block.length; i++) { block[i] ^= mask[i - defHash.length]; } // // check the hash of the encoding params. // for (int i = 0; i != defHash.length; i++) { if (defHash[i] != block[defHash.length + i]) { throw new InvalidCipherTextException("data hash wrong"); } } // // find the data block // int start; for (start = 2 * defHash.length; start != block.length; start++) { if (block[start] != 0) { break; } } if (start >= (block.length - 1) || block[start] != 1) { throw new InvalidCipherTextException("data start wrong " + start); } start++; // // extract the data block // byte[] output = new byte[block.length - start]; System.arraycopy(block, start, output, 0, output.length); return output; } /** * int to octet string. */ private void ItoOSP( int i, byte[] sp) { sp[0] = (byte)(i >>> 24); sp[1] = (byte)(i >>> 16); sp[2] = (byte)(i >>> 8); sp[3] = (byte)(i >>> 0); } /** * mask generator function, as described in PKCS1v2. 
*/ private byte[] maskGeneratorFunction1( byte[] Z, int zOff, int zLen, int length) { byte[] mask = new byte[length]; byte[] hashBuf = new byte[mgf1Hash.getDigestSize()]; byte[] C = new byte[4]; int counter = 0; hash.reset(); do { ItoOSP(counter, C); mgf1Hash.update(Z, zOff, zLen); mgf1Hash.update(C, 0, C.length); mgf1Hash.doFinal(hashBuf, 0); System.arraycopy(hashBuf, 0, mask, counter * hashBuf.length, hashBuf.length); } while (++counter < (length / hashBuf.length)); if ((counter * hashBuf.length) < length) { ItoOSP(counter, C); mgf1Hash.update(Z, zOff, zLen); mgf1Hash.update(C, 0, C.length); mgf1Hash.doFinal(hashBuf, 0); System.arraycopy(hashBuf, 0, mask, counter * hashBuf.length, mask.length - (counter * hashBuf.length)); } return mask; } }
src/org/bouncycastle/crypto/encodings/OAEPEncoding.java
package org.bouncycastle.crypto.encodings; import org.bouncycastle.crypto.AsymmetricBlockCipher; import org.bouncycastle.crypto.CipherParameters; import org.bouncycastle.crypto.Digest; import org.bouncycastle.crypto.InvalidCipherTextException; import org.bouncycastle.crypto.digests.SHA1Digest; import org.bouncycastle.crypto.params.ParametersWithRandom; import java.security.SecureRandom; /** * Optimal Asymmetric Encryption Padding (OAEP) - see PKCS 1 V 2. */ public class OAEPEncoding implements AsymmetricBlockCipher { private byte[] defHash; private Digest hash; private Digest mgf1Hash; private AsymmetricBlockCipher engine; private SecureRandom random; private boolean forEncryption; public OAEPEncoding( AsymmetricBlockCipher cipher) { this(cipher, new SHA1Digest(), null); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash) { this(cipher, hash, null); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash, byte[] encodingParams) { this(cipher, hash, hash, encodingParams); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash, Digest mgf1Hash, byte[] encodingParams) { this.engine = cipher; this.hash = hash; this.mgf1Hash = mgf1Hash; this.defHash = new byte[hash.getDigestSize()]; if (encodingParams != null) { hash.update(encodingParams, 0, encodingParams.length); } hash.doFinal(defHash, 0); } public AsymmetricBlockCipher getUnderlyingCipher() { return engine; } public void init( boolean forEncryption, CipherParameters param) { if (param instanceof ParametersWithRandom) { ParametersWithRandom rParam = (ParametersWithRandom)param; this.random = rParam.getRandom(); } else { this.random = new SecureRandom(); } engine.init(forEncryption, param); this.forEncryption = forEncryption; } public int getInputBlockSize() { int baseBlockSize = engine.getInputBlockSize(); if (forEncryption) { return baseBlockSize - 1 - 2 * defHash.length; } else { return baseBlockSize; } } public int getOutputBlockSize() { int baseBlockSize = 
engine.getOutputBlockSize(); if (forEncryption) { return baseBlockSize; } else { return baseBlockSize - 1 - 2 * defHash.length; } } public byte[] processBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { if (forEncryption) { return encodeBlock(in, inOff, inLen); } else { return decodeBlock(in, inOff, inLen); } } public byte[] encodeBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { byte[] block = new byte[getInputBlockSize() + 1 + 2 * defHash.length]; // // copy in the message // System.arraycopy(in, inOff, block, block.length - inLen, inLen); // // add sentinel // block[block.length - inLen - 1] = 0x01; // // as the block is already zeroed - there's no need to add PS (the >= 0 pad of 0) // // // add the hash of the encoding params. // System.arraycopy(defHash, 0, block, defHash.length, defHash.length); // // generate the seed. // byte[] seed = new byte[defHash.length]; random.nextBytes(seed); // // mask the message block. // byte[] mask = maskGeneratorFunction1(seed, 0, seed.length, block.length - defHash.length); for (int i = defHash.length; i != block.length; i++) { block[i] ^= mask[i - defHash.length]; } // // add in the seed // System.arraycopy(seed, 0, block, 0, defHash.length); // // mask the seed. // mask = maskGeneratorFunction1( block, defHash.length, block.length - defHash.length, defHash.length); for (int i = 0; i != defHash.length; i++) { block[i] ^= mask[i]; } return engine.processBlock(block, 0, block.length); } /** * @exception InvalidCipherTextException if the decrypted block turns out to * be badly formatted. */ public byte[] decodeBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { byte[] data = engine.processBlock(in, inOff, inLen); byte[] block; // // as we may have zeros in our leading bytes for the block we produced // on encryption, we need to make sure our decrypted block comes back // the same size. 
// if (data.length < engine.getOutputBlockSize()) { block = new byte[engine.getOutputBlockSize()]; System.arraycopy(data, 0, block, block.length - data.length, data.length); } else { block = data; } if (block.length < (2 * defHash.length) + 1) { throw new InvalidCipherTextException("data too short"); } // // unmask the seed. // byte[] mask = maskGeneratorFunction1( block, defHash.length, block.length - defHash.length, defHash.length); for (int i = 0; i != defHash.length; i++) { block[i] ^= mask[i]; } // // unmask the message block. // mask = maskGeneratorFunction1(block, 0, defHash.length, block.length - defHash.length); for (int i = defHash.length; i != block.length; i++) { block[i] ^= mask[i - defHash.length]; } // // check the hash of the encoding params. // for (int i = 0; i != defHash.length; i++) { if (defHash[i] != block[defHash.length + i]) { throw new InvalidCipherTextException("data hash wrong"); } } // // find the data block // int start; for (start = 2 * defHash.length; start != block.length; start++) { if (block[start] == 1 || block[start] != 0) { break; } } if (start >= (block.length - 1) || block[start] != 1) { throw new InvalidCipherTextException("data start wrong " + start); } start++; // // extract the data block // byte[] output = new byte[block.length - start]; System.arraycopy(block, start, output, 0, output.length); return output; } /** * int to octet string. */ private void ItoOSP( int i, byte[] sp) { sp[0] = (byte)(i >>> 24); sp[1] = (byte)(i >>> 16); sp[2] = (byte)(i >>> 8); sp[3] = (byte)(i >>> 0); } /** * mask generator function, as described in PKCS1v2. 
*/ private byte[] maskGeneratorFunction1( byte[] Z, int zOff, int zLen, int length) { byte[] mask = new byte[length]; byte[] hashBuf = new byte[mgf1Hash.getDigestSize()]; byte[] C = new byte[4]; int counter = 0; hash.reset(); do { ItoOSP(counter, C); mgf1Hash.update(Z, zOff, zLen); mgf1Hash.update(C, 0, C.length); mgf1Hash.doFinal(hashBuf, 0); System.arraycopy(hashBuf, 0, mask, counter * hashBuf.length, hashBuf.length); } while (++counter < (length / hashBuf.length)); if ((counter * hashBuf.length) < length) { ItoOSP(counter, C); mgf1Hash.update(Z, zOff, zLen); mgf1Hash.update(C, 0, C.length); mgf1Hash.doFinal(hashBuf, 0); System.arraycopy(hashBuf, 0, mask, counter * hashBuf.length, mask.length - (counter * hashBuf.length)); } return mask; } }
simplified - BJA-171
src/org/bouncycastle/crypto/encodings/OAEPEncoding.java
simplified - BJA-171
<ide><path>rc/org/bouncycastle/crypto/encodings/OAEPEncoding.java <ide> <ide> for (start = 2 * defHash.length; start != block.length; start++) <ide> { <del> if (block[start] == 1 || block[start] != 0) <add> if (block[start] != 0) <ide> { <ide> break; <ide> }
Java
agpl-3.0
5d42ca93d03c2cea55a3a8817cd9010a01ba2aa8
0
mtrberzi/smt-nes
package io.lp0onfire.smtnes.generators.cpu; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.junit.Test; import io.lp0onfire.smtnes.CodeGenerator; import io.lp0onfire.smtnes.PageHandler; import io.lp0onfire.smtnes.Z3; import io.lp0onfire.smtnes.StateVariableRegistry; import io.lp0onfire.smtnes.smt2.ArrayDeclaration; import io.lp0onfire.smtnes.smt2.ArrayReadExpression; import io.lp0onfire.smtnes.smt2.Assertion; import io.lp0onfire.smtnes.smt2.BinaryConstant; import io.lp0onfire.smtnes.smt2.BitVectorDeclaration; import io.lp0onfire.smtnes.smt2.EqualsExpression; import io.lp0onfire.smtnes.smt2.Numeral; import io.lp0onfire.smtnes.smt2.SExpression; import io.lp0onfire.smtnes.smt2.Symbol; public class TestCPURAMHandler { // set up (prefix)ChipSelect, CPU_{RAM, AddressBus, WriteEnable, DataOut} // use InitCPURAM to get CPU_RAM, BusDriver to get CPU_{addr,we,do}, // ChipSelectDriver to get (prefix)ChipSelect private static BinaryConstant ramInitialValue = new BinaryConstant("10100101"); class InitCPURAM implements CodeGenerator { @Override public Set<String> getStateVariablesRead() { return new HashSet<>(); } @Override public Set<String> getStateVariablesWritten() { return new HashSet<String>(Arrays.asList(new String[]{ "CPU_RAM" })); } @Override public List<SExpression> generateCode(Map<String, Symbol> inputs, Map<String, Symbol> outputs) { List<SExpression> exprs = new LinkedList<>(); // declare RAM array and initialize to a test pattern Symbol RAM = outputs.get("CPU_RAM"); exprs.add(new ArrayDeclaration(RAM, new Numeral("11"), new Numeral("8"))); for (int i = 0; i < 2048; ++i) { String bits = Integer.toBinaryString(i); // zero-pad on the left int zeroCount = 11 - bits.length(); BinaryConstant index = new BinaryConstant(StringUtils.repeat('0', 
zeroCount) + bits); exprs.add(new Assertion(new EqualsExpression( new ArrayReadExpression(RAM, index), ramInitialValue))); } return exprs; } } class ChipSelectDriver implements CodeGenerator { private final String csName; private final BinaryConstant csValue; public ChipSelectDriver(String chipSelectPrefix, BinaryConstant chipSelectValue) { csName = chipSelectPrefix + "ChipSelect"; csValue = chipSelectValue; } @Override public Set<String> getStateVariablesRead() { return new HashSet<>(); } @Override public Set<String> getStateVariablesWritten() { return new HashSet<String>(Arrays.asList(new String[]{ csName })); } @Override public List<SExpression> generateCode(Map<String, Symbol> inputs, Map<String, Symbol> outputs) { List<SExpression> exprs = new LinkedList<>(); Symbol CS = outputs.get(csName); exprs.add(new BitVectorDeclaration(CS, new Numeral("1"))); exprs.add(new Assertion(new EqualsExpression(CS, csValue))); return exprs; } } class VerifyRAMContents implements CodeGenerator { private final BinaryConstant readAddr; private final BinaryConstant expectedValue; public VerifyRAMContents(BinaryConstant readAddr, BinaryConstant expectedValue) { this.readAddr = readAddr; this.expectedValue = expectedValue; } @Override public Set<String> getStateVariablesRead() { return new HashSet<String>(Arrays.asList(new String[]{ "CPU_RAM" })); } @Override public Set<String> getStateVariablesWritten() { return new HashSet<>(); } @Override public List<SExpression> generateCode(Map<String, Symbol> inputs, Map<String, Symbol> outputs) { List<SExpression> exprs = new LinkedList<>(); Symbol RAM = inputs.get("CPU_RAM"); exprs.add(new Assertion(new EqualsExpression(new ArrayReadExpression(RAM, readAddr), expectedValue))); return exprs; } } class VerifyDataOut implements CodeGenerator { private final String handlerPrefix; private final BinaryConstant expectedValue; public VerifyDataOut(String handlerPrefix, BinaryConstant expectedValue) { this.handlerPrefix = handlerPrefix; 
this.expectedValue = expectedValue; } @Override public Set<String> getStateVariablesRead() { return new HashSet<String>(Arrays.asList(new String[]{ handlerPrefix + "DataOut" })); } @Override public Set<String> getStateVariablesWritten() { return new HashSet<>(); } @Override public List<SExpression> generateCode(Map<String, Symbol> inputs, Map<String, Symbol> outputs) { List<SExpression> exprs = new LinkedList<>(); Symbol DataOut = inputs.get(handlerPrefix + "DataOut"); exprs.add(new Assertion(new EqualsExpression(DataOut, expectedValue))); return exprs; } } @Test(timeout=5000) public void testSyntaxOK() throws IOException { List<SExpression> exprs = new LinkedList<>(); StateVariableRegistry reg = new StateVariableRegistry(); PageHandler ramHandler = new CPURAMHandler(); CodeGenerator ramInit = new InitCPURAM(); CodeGenerator csDrive = new ChipSelectDriver(ramHandler.getHandlerPrefix(), new BinaryConstant("0")); CodeGenerator busDrive = new BusDriver( new BinaryConstant("0000" + "0" + "00000000000"), new BinaryConstant("0"), new BinaryConstant("00000000")); exprs.addAll(reg.apply(ramInit)); exprs.addAll(reg.apply(csDrive)); exprs.addAll(reg.apply(busDrive)); exprs.addAll(reg.apply(ramHandler)); try(Z3 z3 = new Z3()) { z3.open(); for(SExpression expr : exprs) { z3.write(expr.toString()); } assertTrue(z3.checkSat()); } } @Test(timeout=5000) public void testCSLow_MemoryUnchanged() throws IOException { List<SExpression> exprs = new LinkedList<>(); StateVariableRegistry reg = new StateVariableRegistry(); String addrLowBits = "00000000000"; BinaryConstant targetAddr = new BinaryConstant("0000" + "0" + addrLowBits); BinaryConstant ramAddr = new BinaryConstant(addrLowBits); PageHandler ramHandler = new CPURAMHandler(); CodeGenerator ramInit = new InitCPURAM(); CodeGenerator csDrive = new ChipSelectDriver(ramHandler.getHandlerPrefix(), new BinaryConstant("0")); CodeGenerator busDrive = new BusDriver( targetAddr, new BinaryConstant("1"), new BinaryConstant("11111111")); 
CodeGenerator verifier = new VerifyRAMContents(ramAddr, ramInitialValue); // If CS is low, the memory should not change. exprs.addAll(reg.apply(ramInit)); exprs.addAll(reg.apply(csDrive)); exprs.addAll(reg.apply(busDrive)); exprs.addAll(reg.apply(verifier)); exprs.addAll(reg.apply(ramHandler)); exprs.addAll(reg.apply(verifier)); try(Z3 z3 = new Z3()) { z3.open(); for(SExpression expr : exprs) { z3.write(expr.toString()); } assertTrue(z3.checkSat()); } } @Test(timeout=5000) public void testReadMemory() throws IOException { List<SExpression> exprs = new LinkedList<>(); StateVariableRegistry reg = new StateVariableRegistry(); String addrLowBits = "00000000000"; BinaryConstant targetAddr = new BinaryConstant("0000" + "0" + addrLowBits); BinaryConstant ramAddr = new BinaryConstant(addrLowBits); PageHandler ramHandler = new CPURAMHandler(); CodeGenerator ramInit = new InitCPURAM(); CodeGenerator csDrive = new ChipSelectDriver(ramHandler.getHandlerPrefix(), new BinaryConstant("1")); CodeGenerator busDrive = new BusDriver( targetAddr, new BinaryConstant("0"), new BinaryConstant("11111111")); CodeGenerator memoryVerifier = new VerifyRAMContents(ramAddr, ramInitialValue); CodeGenerator dataVerifier = new VerifyDataOut(ramHandler.getHandlerPrefix(), ramInitialValue); // If CS is high and WE is low, the memory should not change // and the value of DataOut should be the value in memory at that address. 
exprs.addAll(reg.apply(ramInit)); exprs.addAll(reg.apply(csDrive)); exprs.addAll(reg.apply(busDrive)); exprs.addAll(reg.apply(memoryVerifier)); exprs.addAll(reg.apply(ramHandler)); exprs.addAll(reg.apply(memoryVerifier)); exprs.addAll(reg.apply(dataVerifier)); try(Z3 z3 = new Z3()) { z3.open(); for(SExpression expr : exprs) { z3.write(expr.toString()); } assertTrue(z3.checkSat()); } } @Test(timeout=5000) public void testWriteMemory() throws IOException { List<SExpression> exprs = new LinkedList<>(); StateVariableRegistry reg = new StateVariableRegistry(); String addrLowBits = "00000000000"; BinaryConstant targetAddr = new BinaryConstant("0000" + "0" + addrLowBits); BinaryConstant ramAddr = new BinaryConstant(addrLowBits); BinaryConstant targetValue = new BinaryConstant("11111111"); PageHandler ramHandler = new CPURAMHandler(); CodeGenerator ramInit = new InitCPURAM(); CodeGenerator csDrive = new ChipSelectDriver(ramHandler.getHandlerPrefix(), new BinaryConstant("1")); CodeGenerator busDrive = new BusDriver( targetAddr, new BinaryConstant("1"), targetValue); CodeGenerator memoryVerifier1 = new VerifyRAMContents(ramAddr, ramInitialValue); CodeGenerator memoryVerifier2 = new VerifyRAMContents(ramAddr, targetValue); // If CS is high and WE is 1, the memory should be updated to reflect the written value. exprs.addAll(reg.apply(ramInit)); exprs.addAll(reg.apply(csDrive)); exprs.addAll(reg.apply(busDrive)); exprs.addAll(reg.apply(memoryVerifier1)); exprs.addAll(reg.apply(ramHandler)); exprs.addAll(reg.apply(memoryVerifier2)); try(Z3 z3 = new Z3()) { z3.open(); for(SExpression expr : exprs) { z3.write(expr.toString()); } assertTrue(z3.checkSat()); } } }
src/intTest/java/io/lp0onfire/smtnes/generators/cpu/TestCPURAMHandler.java
package io.lp0onfire.smtnes.generators.cpu; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.junit.Test; import io.lp0onfire.smtnes.CodeGenerator; import io.lp0onfire.smtnes.PageHandler; import io.lp0onfire.smtnes.Z3; import io.lp0onfire.smtnes.StateVariableRegistry; import io.lp0onfire.smtnes.smt2.ArrayDeclaration; import io.lp0onfire.smtnes.smt2.ArrayReadExpression; import io.lp0onfire.smtnes.smt2.Assertion; import io.lp0onfire.smtnes.smt2.BinaryConstant; import io.lp0onfire.smtnes.smt2.BitVectorDeclaration; import io.lp0onfire.smtnes.smt2.EqualsExpression; import io.lp0onfire.smtnes.smt2.Numeral; import io.lp0onfire.smtnes.smt2.SExpression; import io.lp0onfire.smtnes.smt2.Symbol; public class TestCPURAMHandler { // set up (prefix)ChipSelect, CPU_{RAM, AddressBus, WriteEnable, DataOut} // use InitCPURAM to get CPU_RAM, BusDriver to get CPU_{addr,we,do}, // ChipSelectDriver to get (prefix)ChipSelect private static BinaryConstant ramInitialValue = new BinaryConstant("10100101"); class InitCPURAM implements CodeGenerator { @Override public Set<String> getStateVariablesRead() { return new HashSet<>(); } @Override public Set<String> getStateVariablesWritten() { return new HashSet<String>(Arrays.asList(new String[]{ "CPU_RAM" })); } @Override public List<SExpression> generateCode(Map<String, Symbol> inputs, Map<String, Symbol> outputs) { List<SExpression> exprs = new LinkedList<>(); // declare RAM array and initialize to a test pattern Symbol RAM = outputs.get("CPU_RAM"); exprs.add(new ArrayDeclaration(RAM, new Numeral("11"), new Numeral("8"))); for (int i = 0; i < 2048; ++i) { String bits = Integer.toBinaryString(i); // zero-pad on the left int zeroCount = 11 - bits.length(); BinaryConstant index = new BinaryConstant(StringUtils.repeat('0', 
zeroCount) + bits); exprs.add(new Assertion(new EqualsExpression( new ArrayReadExpression(RAM, index), ramInitialValue))); } return exprs; } } class ChipSelectDriver implements CodeGenerator { private final String csName; private final BinaryConstant csValue; public ChipSelectDriver(String chipSelectPrefix, BinaryConstant chipSelectValue) { csName = chipSelectPrefix + "ChipSelect"; csValue = chipSelectValue; } @Override public Set<String> getStateVariablesRead() { return new HashSet<>(); } @Override public Set<String> getStateVariablesWritten() { return new HashSet<String>(Arrays.asList(new String[]{ csName })); } @Override public List<SExpression> generateCode(Map<String, Symbol> inputs, Map<String, Symbol> outputs) { List<SExpression> exprs = new LinkedList<>(); Symbol CS = outputs.get(csName); exprs.add(new BitVectorDeclaration(CS, new Numeral("1"))); exprs.add(new Assertion(new EqualsExpression(CS, csValue))); return exprs; } } class VerifyRAMContents implements CodeGenerator { private final BinaryConstant readAddr; private final BinaryConstant expectedValue; public VerifyRAMContents(BinaryConstant readAddr, BinaryConstant expectedValue) { this.readAddr = readAddr; this.expectedValue = expectedValue; } @Override public Set<String> getStateVariablesRead() { return new HashSet<String>(Arrays.asList(new String[]{ "CPU_RAM" })); } @Override public Set<String> getStateVariablesWritten() { return new HashSet<>(); } @Override public List<SExpression> generateCode(Map<String, Symbol> inputs, Map<String, Symbol> outputs) { List<SExpression> exprs = new LinkedList<>(); Symbol RAM = inputs.get("CPU_RAM"); exprs.add(new Assertion(new EqualsExpression(new ArrayReadExpression(RAM, readAddr), expectedValue))); return exprs; } } @Test(timeout=5000) public void testSyntaxOK() throws IOException { List<SExpression> exprs = new LinkedList<>(); StateVariableRegistry reg = new StateVariableRegistry(); PageHandler ramHandler = new CPURAMHandler(); CodeGenerator ramInit = new 
InitCPURAM(); CodeGenerator csDrive = new ChipSelectDriver(ramHandler.getHandlerPrefix(), new BinaryConstant("0")); CodeGenerator busDrive = new BusDriver( new BinaryConstant("0000" + "0" + "00000000000"), new BinaryConstant("0"), new BinaryConstant("00000000")); exprs.addAll(reg.apply(ramInit)); exprs.addAll(reg.apply(csDrive)); exprs.addAll(reg.apply(busDrive)); exprs.addAll(reg.apply(ramHandler)); try(Z3 z3 = new Z3()) { z3.open(); for(SExpression expr : exprs) { z3.write(expr.toString()); } assertTrue(z3.checkSat()); } } @Test(timeout=5000) public void testCSLow_MemoryUnchanged() throws IOException { List<SExpression> exprs = new LinkedList<>(); StateVariableRegistry reg = new StateVariableRegistry(); String addrLowBits = "00000000000"; BinaryConstant targetAddr = new BinaryConstant("0000" + "0" + addrLowBits); BinaryConstant ramAddr = new BinaryConstant(addrLowBits); PageHandler ramHandler = new CPURAMHandler(); CodeGenerator ramInit = new InitCPURAM(); CodeGenerator csDrive = new ChipSelectDriver(ramHandler.getHandlerPrefix(), new BinaryConstant("0")); CodeGenerator busDrive = new BusDriver( targetAddr, new BinaryConstant("1"), new BinaryConstant("11111111")); CodeGenerator verifier = new VerifyRAMContents(ramAddr, ramInitialValue); // If CS is low, the memory should not change. exprs.addAll(reg.apply(ramInit)); exprs.addAll(reg.apply(csDrive)); exprs.addAll(reg.apply(busDrive)); exprs.addAll(reg.apply(verifier)); exprs.addAll(reg.apply(ramHandler)); exprs.addAll(reg.apply(verifier)); try(Z3 z3 = new Z3()) { z3.open(); for(SExpression expr : exprs) { z3.write(expr.toString()); } assertTrue(z3.checkSat()); } } }
CPU RAM read and write tests
src/intTest/java/io/lp0onfire/smtnes/generators/cpu/TestCPURAMHandler.java
CPU RAM read and write tests
<ide><path>rc/intTest/java/io/lp0onfire/smtnes/generators/cpu/TestCPURAMHandler.java <ide> <ide> } <ide> <add> class VerifyDataOut implements CodeGenerator { <add> private final String handlerPrefix; <add> private final BinaryConstant expectedValue; <add> <add> public VerifyDataOut(String handlerPrefix, BinaryConstant expectedValue) { <add> this.handlerPrefix = handlerPrefix; <add> this.expectedValue = expectedValue; <add> } <add> <add> @Override <add> public Set<String> getStateVariablesRead() { <add> return new HashSet<String>(Arrays.asList(new String[]{ <add> handlerPrefix + "DataOut" <add> })); <add> } <add> <add> @Override <add> public Set<String> getStateVariablesWritten() { <add> return new HashSet<>(); <add> } <add> <add> @Override <add> public List<SExpression> generateCode(Map<String, Symbol> inputs, <add> Map<String, Symbol> outputs) { <add> List<SExpression> exprs = new LinkedList<>(); <add> <add> Symbol DataOut = inputs.get(handlerPrefix + "DataOut"); <add> exprs.add(new Assertion(new EqualsExpression(DataOut, expectedValue))); <add> <add> return exprs; <add> } <add> } <add> <ide> @Test(timeout=5000) <ide> public void testSyntaxOK() throws IOException { <ide> List<SExpression> exprs = new LinkedList<>(); <ide> } <ide> } <ide> <add> @Test(timeout=5000) <add> public void testReadMemory() throws IOException { <add> List<SExpression> exprs = new LinkedList<>(); <add> StateVariableRegistry reg = new StateVariableRegistry(); <add> <add> String addrLowBits = "00000000000"; <add> <add> BinaryConstant targetAddr = new BinaryConstant("0000" + "0" + addrLowBits); <add> BinaryConstant ramAddr = new BinaryConstant(addrLowBits); <add> <add> PageHandler ramHandler = new CPURAMHandler(); <add> CodeGenerator ramInit = new InitCPURAM(); <add> CodeGenerator csDrive = new ChipSelectDriver(ramHandler.getHandlerPrefix(), new BinaryConstant("1")); <add> CodeGenerator busDrive = new BusDriver( <add> targetAddr, new BinaryConstant("0"), new BinaryConstant("11111111")); <add> 
CodeGenerator memoryVerifier = new VerifyRAMContents(ramAddr, ramInitialValue); <add> CodeGenerator dataVerifier = new VerifyDataOut(ramHandler.getHandlerPrefix(), ramInitialValue); <add> <add> // If CS is high and WE is low, the memory should not change <add> // and the value of DataOut should be the value in memory at that address. <add> <add> exprs.addAll(reg.apply(ramInit)); <add> exprs.addAll(reg.apply(csDrive)); <add> exprs.addAll(reg.apply(busDrive)); <add> exprs.addAll(reg.apply(memoryVerifier)); <add> exprs.addAll(reg.apply(ramHandler)); <add> exprs.addAll(reg.apply(memoryVerifier)); <add> exprs.addAll(reg.apply(dataVerifier)); <add> <add> try(Z3 z3 = new Z3()) { <add> z3.open(); <add> for(SExpression expr : exprs) { <add> z3.write(expr.toString()); <add> } <add> assertTrue(z3.checkSat()); <add> } <add> } <add> <add> @Test(timeout=5000) <add> public void testWriteMemory() throws IOException { <add> List<SExpression> exprs = new LinkedList<>(); <add> StateVariableRegistry reg = new StateVariableRegistry(); <add> <add> String addrLowBits = "00000000000"; <add> <add> BinaryConstant targetAddr = new BinaryConstant("0000" + "0" + addrLowBits); <add> BinaryConstant ramAddr = new BinaryConstant(addrLowBits); <add> <add> BinaryConstant targetValue = new BinaryConstant("11111111"); <add> <add> PageHandler ramHandler = new CPURAMHandler(); <add> CodeGenerator ramInit = new InitCPURAM(); <add> CodeGenerator csDrive = new ChipSelectDriver(ramHandler.getHandlerPrefix(), new BinaryConstant("1")); <add> CodeGenerator busDrive = new BusDriver( <add> targetAddr, new BinaryConstant("1"), targetValue); <add> CodeGenerator memoryVerifier1 = new VerifyRAMContents(ramAddr, ramInitialValue); <add> CodeGenerator memoryVerifier2 = new VerifyRAMContents(ramAddr, targetValue); <add> <add> // If CS is high and WE is 1, the memory should be updated to reflect the written value. 
<add> <add> exprs.addAll(reg.apply(ramInit)); <add> exprs.addAll(reg.apply(csDrive)); <add> exprs.addAll(reg.apply(busDrive)); <add> exprs.addAll(reg.apply(memoryVerifier1)); <add> exprs.addAll(reg.apply(ramHandler)); <add> exprs.addAll(reg.apply(memoryVerifier2)); <add> <add> try(Z3 z3 = new Z3()) { <add> z3.open(); <add> for(SExpression expr : exprs) { <add> z3.write(expr.toString()); <add> } <add> assertTrue(z3.checkSat()); <add> } <add> } <add> <ide> }
Java
apache-2.0
1170dea0b8fd8baa8129183e29192eeb47fa2e9e
0
openbaton/generic-vnfm,openbaton/generic-vnfm
package org.openbaton.registration; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; import com.rabbitmq.client.AMQP; import com.rabbitmq.client.Channel; import com.rabbitmq.client.Connection; import com.rabbitmq.client.ConnectionFactory; import com.rabbitmq.client.DefaultConsumer; import com.rabbitmq.client.Envelope; import org.openbaton.catalogue.nfvo.ManagerCredentials; import org.openbaton.catalogue.nfvo.VnfmManagerEndpoint; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.amqp.rabbit.connection.CachingConnectionFactory; import org.springframework.amqp.rabbit.core.RabbitTemplate; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Service; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.util.UUID; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeoutException; /** This class handles the registration of Vnfms and plugins to the Nfvo. */ @Service @Scope("prototype") @ConfigurationProperties public class Registration { private static Logger log = LoggerFactory.getLogger(Registration.class); @Autowired private Gson gson; private String username; private String password; @Value("${vnfm.connect.tries:20}") private int maxTries; /** * This method registers a Vnfm to the Nfvo by sending a request to the nfvo.manager.handling * queue using the given RabbitTemplate. The Nfvo's answer contains a username and password which * is injected into the RabbitTemplate so that it can send requests to the queue dedicated to the * specific Vnfm possessing the RabbitTemplate object. 
* * @param rabbitTemplate */ public String[] registerVnfmToNfvo(RabbitTemplate rabbitTemplate, VnfmManagerEndpoint endpoint) throws InterruptedException { JsonObject message = new JsonObject(); message.add("type", new JsonPrimitive(endpoint.getType())); message.add("action", new JsonPrimitive("register")); message.add("vnfmManagerEndpoint", gson.toJsonTree(endpoint, VnfmManagerEndpoint.class)); log.debug("Registering the Vnfm to the Nfvo"); int tries = 0; Object res = null; if (maxTries < 0) maxTries = Integer.MAX_VALUE; while (tries < maxTries) { res = rabbitTemplate.convertSendAndReceive("nfvo.manager.handling", gson.toJson(message)); if (res == null) { log.debug("NFVO answer is null, i suppose it is not running yet, i will try again in 2,5 seconds."); Thread.sleep(2500); tries++; } else { break; } } if (res == null) { throw new IllegalArgumentException("The NFVO's answer to the registration request is null."); } if (!(res instanceof ManagerCredentials)) { throw new IllegalArgumentException( "The NFVO's answer to the registration request should be of type ManagerCredentials, but it is " + res.getClass().getSimpleName()); } this.username = ((ManagerCredentials) res).getRabbitUsername(); this.password = ((ManagerCredentials) res).getRabbitPassword(); ((CachingConnectionFactory) rabbitTemplate.getConnectionFactory()).setUsername(username); ((CachingConnectionFactory) rabbitTemplate.getConnectionFactory()).setPassword(password); String[] usernamePassword = new String[2]; usernamePassword[0] = username; usernamePassword[1] = password; return usernamePassword; } /** * This method deregisters a Vnfm from the Nfvo by sending a request to the nfvo.manager.handling * queue using the given RabbitTemplate. The rabbitTemplate object should be obtained from the * Vnfm's VnfmSpringHelperRabbit object. 
* * @param rabbitTemplate */ public void deregisterVnfmFromNfvo(RabbitTemplate rabbitTemplate, VnfmManagerEndpoint endpoint) { JsonObject message = new JsonObject(); message.add("username", new JsonPrimitive(this.username)); message.add("action", new JsonPrimitive("deregister")); message.add("password", new JsonPrimitive(this.password)); message.add("vnfmManagerEndpoint", gson.toJsonTree(endpoint)); log.debug("Deregister the Vnfm from the Nfvo"); rabbitTemplate.convertAndSend("nfvo.manager.handling", gson.toJson(message)); } /** * Sends a registration message to the NFVO and returns a managerCredentials object from which the * rabbitmq username and password can be obtained. * * @param brokerIp * @param port * @param username * @param password * @param pluginName * @return * @throws IOException * @throws TimeoutException */ public ManagerCredentials registerPluginToNfvo( String brokerIp, int port, String username, String password, String virtualHost, String pluginName) throws IOException, TimeoutException, InterruptedException { String message = "{'type':'" + pluginName + "','action':'register'}"; ConnectionFactory factory = new ConnectionFactory(); factory.setHost(brokerIp); factory.setPort(port); factory.setUsername(username); factory.setPassword(password); factory.setVirtualHost(virtualHost); Connection connection = factory.newConnection(); Channel channel = connection.createChannel(); // check if exchange and queue exist channel.exchangeDeclarePassive("openbaton-exchange"); channel.queueDeclarePassive("nfvo.manager.handling"); channel.basicQos(1); String replyQueueName = "amq.rabbitmq.reply-to"; final String corrId = UUID.randomUUID().toString(); AMQP.BasicProperties props = new AMQP.BasicProperties.Builder().correlationId(corrId).replyTo(replyQueueName).build(); final BlockingQueue<ManagerCredentials> response = new ArrayBlockingQueue<ManagerCredentials>(1); channel.basicConsume( replyQueueName, true, new DefaultConsumer(channel) { @Override public void 
handleDelivery( String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException { if (properties.getCorrelationId().equals(corrId)) { ManagerCredentials managerCredentials = null; ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(body); ObjectInputStream objectInputStream = new ObjectInputStream(byteArrayInputStream); Object replyObject = null; try { replyObject = objectInputStream.readObject(); } catch (ClassNotFoundException e) { throw new RuntimeException( "Could not deserialize the registration request's reply.", e.getCause()); } if (!(replyObject instanceof ManagerCredentials)) throw new RuntimeException( "Could not obtain credentials while registering plugin to Nfvo since the reply is no ManagerCredentials object"); managerCredentials = (ManagerCredentials) replyObject; response.offer(managerCredentials); } } }); channel.basicPublish( "openbaton-exchange", "nfvo.manager.handling", props, message.getBytes("UTF-8")); ManagerCredentials managerCredentials = response.take(); channel.close(); connection.close(); return managerCredentials; } public void deregisterPluginFromNfvo( String brokerIp, int port, String username, String password, String virtualHost, String managerCredentialUsername, String managerCredentialPassword) throws IOException, TimeoutException { String message = "{'username':'" + managerCredentialUsername + "','action':'deregister','password':'" + managerCredentialPassword + "'}"; ConnectionFactory factory = new ConnectionFactory(); factory.setHost(brokerIp); factory.setPort(port); factory.setUsername(username); factory.setPassword(password); factory.setVirtualHost(virtualHost); Connection connection = factory.newConnection(); Channel channel = connection.createChannel(); // check if exchange and queue exist channel.exchangeDeclarePassive("openbaton-exchange"); channel.queueDeclarePassive("nfvo.manager.handling"); channel.basicQos(1); channel.basicPublish("openbaton-exchange", 
"nfvo.manager.handling", null, message.getBytes()); channel.close(); connection.close(); } }
registration/src/main/java/org/openbaton/registration/Registration.java
package org.openbaton.registration; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; import com.rabbitmq.client.*; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.util.UUID; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeoutException; import org.openbaton.catalogue.nfvo.ManagerCredentials; import org.openbaton.catalogue.nfvo.VnfmManagerEndpoint; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.amqp.rabbit.connection.CachingConnectionFactory; import org.springframework.amqp.rabbit.core.RabbitTemplate; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Service; /** This class handles the registration of Vnfms and plugins to the Nfvo. */ @Service @Scope("prototype") public class Registration { private static Logger log = LoggerFactory.getLogger(Registration.class); @Autowired private Gson gson; private String username; private String password; /** * This method registers a Vnfm to the Nfvo by sending a request to the nfvo.manager.handling * queue using the given RabbitTemplate. The Nfvo's answer contains a username and password which * is injected into the RabbitTemplate so that it can send requests to the queue dedicated to the * specific Vnfm possessing the RabbitTemplate object. 
* * @param rabbitTemplate */ public String[] registerVnfmToNfvo(RabbitTemplate rabbitTemplate, VnfmManagerEndpoint endpoint) { JsonObject message = new JsonObject(); message.add("type", new JsonPrimitive(endpoint.getType())); message.add("action", new JsonPrimitive("register")); message.add("vnfmManagerEndpoint", gson.toJsonTree(endpoint, VnfmManagerEndpoint.class)); log.debug("Registering the Vnfm to the Nfvo"); Object res = rabbitTemplate.convertSendAndReceive("nfvo.manager.handling", gson.toJson(message)); if (res == null) throw new IllegalArgumentException("The NFVO's answer to the registration request is null."); if (!(res instanceof ManagerCredentials)) throw new IllegalArgumentException( "The NFVO's answer to the registration request should be of type ManagerCredentials, but it is " + res.getClass().getSimpleName()); this.username = ((ManagerCredentials) res).getRabbitUsername(); this.password = ((ManagerCredentials) res).getRabbitPassword(); ((CachingConnectionFactory) rabbitTemplate.getConnectionFactory()).setUsername(username); ((CachingConnectionFactory) rabbitTemplate.getConnectionFactory()).setPassword(password); String[] usernamePassword = new String[2]; usernamePassword[0] = username; usernamePassword[1] = password; return usernamePassword; } /** * This method deregisters a Vnfm from the Nfvo by sending a request to the nfvo.manager.handling * queue using the given RabbitTemplate. The rabbitTemplate object should be obtained from the * Vnfm's VnfmSpringHelperRabbit object. 
* * @param rabbitTemplate */ public void deregisterVnfmFromNfvo(RabbitTemplate rabbitTemplate, VnfmManagerEndpoint endpoint) { JsonObject message = new JsonObject(); message.add("username", new JsonPrimitive(this.username)); message.add("action", new JsonPrimitive("deregister")); message.add("password", new JsonPrimitive(this.password)); message.add("vnfmManagerEndpoint", gson.toJsonTree(endpoint)); log.debug("Deregister the Vnfm from the Nfvo"); rabbitTemplate.convertAndSend("nfvo.manager.handling", gson.toJson(message)); } /** * Sends a registration message to the NFVO and returns a managerCredentials object from which the * rabbitmq username and password can be obtained. * * @param brokerIp * @param port * @param username * @param password * @param pluginName * @return * @throws IOException * @throws TimeoutException */ public ManagerCredentials registerPluginToNfvo( String brokerIp, int port, String username, String password, String virtualHost, String pluginName) throws IOException, TimeoutException, InterruptedException { String message = "{'type':'" + pluginName + "','action':'register'}"; ConnectionFactory factory = new ConnectionFactory(); factory.setHost(brokerIp); factory.setPort(port); factory.setUsername(username); factory.setPassword(password); factory.setVirtualHost(virtualHost); Connection connection = factory.newConnection(); Channel channel = connection.createChannel(); // check if exchange and queue exist channel.exchangeDeclarePassive("openbaton-exchange"); channel.queueDeclarePassive("nfvo.manager.handling"); channel.basicQos(1); String replyQueueName = "amq.rabbitmq.reply-to"; final String corrId = UUID.randomUUID().toString(); AMQP.BasicProperties props = new AMQP.BasicProperties.Builder().correlationId(corrId).replyTo(replyQueueName).build(); final BlockingQueue<ManagerCredentials> response = new ArrayBlockingQueue<ManagerCredentials>(1); channel.basicConsume( replyQueueName, true, new DefaultConsumer(channel) { @Override public void 
handleDelivery( String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException { if (properties.getCorrelationId().equals(corrId)) { ManagerCredentials managerCredentials = null; ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(body); ObjectInputStream objectInputStream = new ObjectInputStream(byteArrayInputStream); Object replyObject = null; try { replyObject = objectInputStream.readObject(); } catch (ClassNotFoundException e) { throw new RuntimeException( "Could not deserialize the registration request's reply.", e.getCause()); } if (!(replyObject instanceof ManagerCredentials)) throw new RuntimeException( "Could not obtain credentials while registering plugin to Nfvo since the reply is no ManagerCredentials object"); managerCredentials = (ManagerCredentials) replyObject; response.offer(managerCredentials); } } }); channel.basicPublish( "openbaton-exchange", "nfvo.manager.handling", props, message.getBytes("UTF-8")); ManagerCredentials managerCredentials = response.take(); channel.close(); connection.close(); return managerCredentials; } public void deregisterPluginFromNfvo( String brokerIp, int port, String username, String password, String virtualHost, String managerCredentialUsername, String managerCredentialPassword) throws IOException, TimeoutException { String message = "{'username':'" + managerCredentialUsername + "','action':'deregister','password':'" + managerCredentialPassword + "'}"; ConnectionFactory factory = new ConnectionFactory(); factory.setHost(brokerIp); factory.setPort(port); factory.setUsername(username); factory.setPassword(password); factory.setVirtualHost(virtualHost); Connection connection = factory.newConnection(); Channel channel = connection.createChannel(); // check if exchange and queue exist channel.exchangeDeclarePassive("openbaton-exchange"); channel.queueDeclarePassive("nfvo.manager.handling"); channel.basicQos(1); channel.basicPublish("openbaton-exchange", 
"nfvo.manager.handling", null, message.getBytes()); channel.close(); connection.close(); } }
add wait for nfvo to be started
registration/src/main/java/org/openbaton/registration/Registration.java
add wait for nfvo to be started
<ide><path>egistration/src/main/java/org/openbaton/registration/Registration.java <ide> import com.google.gson.Gson; <ide> import com.google.gson.JsonObject; <ide> import com.google.gson.JsonPrimitive; <del>import com.rabbitmq.client.*; <add>import com.rabbitmq.client.AMQP; <add>import com.rabbitmq.client.Channel; <add>import com.rabbitmq.client.Connection; <add>import com.rabbitmq.client.ConnectionFactory; <add>import com.rabbitmq.client.DefaultConsumer; <add>import com.rabbitmq.client.Envelope; <add> <add>import org.openbaton.catalogue.nfvo.ManagerCredentials; <add>import org.openbaton.catalogue.nfvo.VnfmManagerEndpoint; <add>import org.slf4j.Logger; <add>import org.slf4j.LoggerFactory; <add>import org.springframework.amqp.rabbit.connection.CachingConnectionFactory; <add>import org.springframework.amqp.rabbit.core.RabbitTemplate; <add>import org.springframework.beans.factory.annotation.Autowired; <add>import org.springframework.beans.factory.annotation.Value; <add>import org.springframework.boot.context.properties.ConfigurationProperties; <add>import org.springframework.context.annotation.Scope; <add>import org.springframework.stereotype.Service; <add> <ide> import java.io.ByteArrayInputStream; <ide> import java.io.IOException; <ide> import java.io.ObjectInputStream; <ide> import java.util.concurrent.ArrayBlockingQueue; <ide> import java.util.concurrent.BlockingQueue; <ide> import java.util.concurrent.TimeoutException; <del>import org.openbaton.catalogue.nfvo.ManagerCredentials; <del>import org.openbaton.catalogue.nfvo.VnfmManagerEndpoint; <del>import org.slf4j.Logger; <del>import org.slf4j.LoggerFactory; <del>import org.springframework.amqp.rabbit.connection.CachingConnectionFactory; <del>import org.springframework.amqp.rabbit.core.RabbitTemplate; <del>import org.springframework.beans.factory.annotation.Autowired; <del>import org.springframework.context.annotation.Scope; <del>import org.springframework.stereotype.Service; <ide> <ide> /** This class handles the 
registration of Vnfms and plugins to the Nfvo. */ <ide> @Service <ide> @Scope("prototype") <add>@ConfigurationProperties <ide> public class Registration { <ide> <ide> private static Logger log = LoggerFactory.getLogger(Registration.class); <ide> <ide> private String username; <ide> private String password; <add> @Value("${vnfm.connect.tries:20}") <add> private int maxTries; <ide> <ide> /** <ide> * This method registers a Vnfm to the Nfvo by sending a request to the nfvo.manager.handling <ide> * <ide> * @param rabbitTemplate <ide> */ <del> public String[] registerVnfmToNfvo(RabbitTemplate rabbitTemplate, VnfmManagerEndpoint endpoint) { <add> public String[] registerVnfmToNfvo(RabbitTemplate rabbitTemplate, VnfmManagerEndpoint endpoint) throws <add> InterruptedException { <ide> <ide> JsonObject message = new JsonObject(); <ide> message.add("type", new JsonPrimitive(endpoint.getType())); <ide> message.add("action", new JsonPrimitive("register")); <ide> message.add("vnfmManagerEndpoint", gson.toJsonTree(endpoint, VnfmManagerEndpoint.class)); <ide> log.debug("Registering the Vnfm to the Nfvo"); <del> Object res = <del> rabbitTemplate.convertSendAndReceive("nfvo.manager.handling", gson.toJson(message)); <del> if (res == null) <add> int tries = 0; <add> Object res = null; <add> if (maxTries < 0) <add> maxTries = Integer.MAX_VALUE; <add> while (tries < maxTries) { <add> res = rabbitTemplate.convertSendAndReceive("nfvo.manager.handling", gson.toJson(message)); <add> if (res == null) { <add> log.debug("NFVO answer is null, i suppose it is not running yet, i will try again in 2,5 seconds."); <add> Thread.sleep(2500); <add> tries++; <add> } else { <add> break; <add> } <add> } <add> if (res == null) { <ide> throw new IllegalArgumentException("The NFVO's answer to the registration request is null."); <del> if (!(res instanceof ManagerCredentials)) <add> } <add> if (!(res instanceof ManagerCredentials)) { <ide> throw new IllegalArgumentException( <del> "The NFVO's answer to the 
registration request should be of type ManagerCredentials, but it is " <del> + res.getClass().getSimpleName()); <add> "The NFVO's answer to the registration request should be of type ManagerCredentials, but it is " + <add> res.getClass().getSimpleName()); <add> } <ide> this.username = ((ManagerCredentials) res).getRabbitUsername(); <ide> this.password = ((ManagerCredentials) res).getRabbitPassword(); <ide> ((CachingConnectionFactory) rabbitTemplate.getConnectionFactory()).setUsername(username);
Java
apache-2.0
5547b2b23764d7c5fd2778ab56801cac67172957
0
vector-im/vector-android,vector-im/vector-android,noepitome/neon-android,floviolleau/vector-android,noepitome/neon-android,noepitome/neon-android,vector-im/riot-android,riot-spanish/riot-android,noepitome/neon-android,vector-im/vector-android,riot-spanish/riot-android,vector-im/riot-android,riot-spanish/riot-android,vector-im/riot-android,riot-spanish/riot-android,vector-im/vector-android,floviolleau/vector-android,vector-im/riot-android,floviolleau/vector-android,vector-im/riot-android
/* * Copyright 2016 OpenMarket Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package im.vector.fragments; import android.os.Bundle; import android.text.TextUtils; import android.util.Log; import android.view.View; import android.widget.Toast; import org.matrix.androidsdk.adapters.MessageRow; import org.matrix.androidsdk.data.RoomState; import org.matrix.androidsdk.rest.callback.ApiCallback; import org.matrix.androidsdk.rest.model.Event; import org.matrix.androidsdk.rest.model.MatrixError; import org.matrix.androidsdk.rest.model.Message; import org.matrix.androidsdk.rest.model.TokensChunkResponse; import org.matrix.androidsdk.util.JsonUtils; import java.util.ArrayList; import java.util.Collections; import java.util.List; import im.vector.activity.VectorBaseSearchActivity; public class VectorSearchRoomFilesListFragment extends VectorSearchRoomsFilesListFragment { static final int MESSAGES_PAGINATION_LIMIT = 50; static final String LOG_TAG = "SearchRoomFilesListFrag"; // set to false when there is no more available message in the room history private boolean mCanPaginateBack = true; /** * static constructor * @param matrixId the session Id. * @param layoutResId the used layout. 
*/ public static VectorSearchRoomFilesListFragment newInstance(String matrixId, String roomId, int layoutResId) { VectorSearchRoomFilesListFragment frag = new VectorSearchRoomFilesListFragment(); Bundle args = new Bundle(); args.putInt(ARG_LAYOUT_ID, layoutResId); args.putString(ARG_MATRIX_ID, matrixId); if (null != roomId) { args.putString(ARG_ROOM_ID, roomId); } frag.setArguments(args); return frag; } /** * Cancel the catching requests. */ public void cancelCatchingRequests() { super.cancelCatchingRequests(); mIsBackPaginating = false; mCanPaginateBack = true; mRoom.cancelRemoteHistoryRequest(); mNextBatch = mRoom.getLiveState().getToken(); } @Override public void onPause() { super.onPause(); // Fix memory leak: VectorRoomDetailsActivity() instances leak cancelCatchingRequests(); } @Override public void onResume() { super.onResume(); if (getActivity() instanceof VectorBaseSearchActivity.IVectorSearchActivity) { ((VectorBaseSearchActivity.IVectorSearchActivity)getActivity()).refreshSearch(); } else { startFilesSearch(null); } } /** * Update the searched pattern. */ public void startFilesSearch(final OnSearchResultListener onSearchResultListener) { // please wait if (mIsBackPaginating) { return; } // add the listener to list to warn when the search is done. 
if (null != onSearchResultListener) { mSearchListeners.add(onSearchResultListener); } // will be called when resumed // onCreateView is not yet called if (null == mMessageListView) { return; } mIsBackPaginating = true; mMessageListView.setVisibility(View.GONE); remoteRoomHistoryRequest(new ArrayList<Event>(), new ApiCallback<ArrayList<Event>>() { @Override public void onSuccess(ArrayList<Event> eventsChunk) { ArrayList<MessageRow> messageRows = new ArrayList<>(eventsChunk.size()); RoomState liveState = mRoom.getLiveState(); for (Event event : eventsChunk) { messageRows.add(new MessageRow(event, liveState)); } Collections.reverse(messageRows); mAdapter.clear(); mAdapter.addAll(messageRows); mMessageListView.setAdapter(mAdapter); mMessageListView.setOnScrollListener(mScrollListener); // scroll to the bottom scrollToBottom(); mMessageListView.setVisibility(View.VISIBLE); for(OnSearchResultListener listener : mSearchListeners) { try { listener.onSearchSucceed(messageRows.size()); } catch (Exception e) { Log.e(LOG_TAG, "## remoteRoomHistoryRequest() : onSearchSucceed failed " + e.getMessage()); } } mIsBackPaginating = false; mSearchListeners.clear(); } private void onError() { mMessageListView.setVisibility(View.GONE); // clear the results list if teh search fails mAdapter.clear(); for(OnSearchResultListener listener : mSearchListeners) { try { listener.onSearchFailed(); } catch (Exception e) { Log.e(LOG_TAG, "## remoteRoomHistoryRequest() : onSearchFailed failed " + e.getMessage()); } } mIsBackPaginating = false; mSearchListeners.clear(); } @Override public void onNetworkError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void onMatrixError(MatrixError e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void onUnexpectedError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); 
onError(); } }); } /** * Search the pattern on a pagination server side. */ @Override public void backPaginate(boolean fillHistory) { // please wait if (mIsBackPaginating || !mCanPaginateBack) { return; } mIsBackPaginating = true; final int firstPos = mMessageListView.getFirstVisiblePosition(); final int countBeforeUpdate = mAdapter.getCount(); showLoadingBackProgress(); remoteRoomHistoryRequest(new ArrayList<Event>(), new ApiCallback<ArrayList<Event>>() { @Override public void onSuccess(final ArrayList<Event> eventChunks) { VectorSearchRoomFilesListFragment.this.getActivity().runOnUiThread(new Runnable() { @Override public void run() { // is there any result to display if (0 != eventChunks.size()) { mAdapter.setNotifyOnChange(false); for (Event event : eventChunks) { MessageRow row = new MessageRow(event, mRoom.getLiveState()); mAdapter.insert(row, 0); } // Scroll the list down to where it was before adding rows to the top mUiHandler.post(new Runnable() { @Override public void run() { // refresh the list only at the end of the sync // else the one by one message refresh gives a weird UX // The application is almost frozen during the mAdapter.notifyDataSetChanged(); // do not use count because some messages are not displayed // so we compute the new pos mMessageListView.setSelection(firstPos + (mAdapter.getCount() - countBeforeUpdate)); mIsBackPaginating = false; } }); } else { mIsBackPaginating = false; } VectorSearchRoomFilesListFragment.this.hideLoadingBackProgress(); } }); } private void onError() { mIsBackPaginating = false; VectorSearchRoomFilesListFragment.this.hideLoadingBackProgress(); } // the request will be auto restarted when a valid network will be found @Override public void onNetworkError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void onMatrixError(MatrixError e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } 
@Override public void onUnexpectedError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } }); } /** * Filter and append the found events * @param events the matched events list * @param eventsToAppend the retrieved events list. */ private void appendEvents(ArrayList<Event> events, List<Event> eventsToAppend) { // filter ArrayList<Event> filteredEvents = new ArrayList<>(eventsToAppend.size()); for(Event event : eventsToAppend) { if (Event.EVENT_TYPE_MESSAGE.equals(event.type)) { Message message = JsonUtils.toMessage(event.content); if (Message.MSGTYPE_FILE.equals(message.msgtype) || Message.MSGTYPE_IMAGE.equals(message.msgtype) || Message.MSGTYPE_VIDEO.equals(message.msgtype)) { filteredEvents.add(event); } } } events.addAll(filteredEvents); } /** * Search some files until find out at least 10 matching messages. * @param events the result events lists * @param callback the result callback */ private void remoteRoomHistoryRequest(final ArrayList<Event> events, final ApiCallback<ArrayList<Event>> callback) { mRoom.requestServerRoomHistory(mNextBatch, MESSAGES_PAGINATION_LIMIT, new ApiCallback<TokensChunkResponse<Event>>() { @Override public void onSuccess(TokensChunkResponse<Event> eventsChunk) { if ((null == mNextBatch) || TextUtils.equals(eventsChunk.start, mNextBatch)) { // no more message in the history if (0 == eventsChunk.chunk.size()) { mCanPaginateBack = false; callback.onSuccess(events); } else { // append the retrieved one appendEvents(events, eventsChunk.chunk); mNextBatch = eventsChunk.end; if (events.size() >= 10) { callback.onSuccess(events); } else { remoteRoomHistoryRequest(events, callback); } } } } private void onError() { callback.onSuccess(events); } @Override public void onNetworkError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void onMatrixError(MatrixError e) { Toast.makeText(getActivity(), 
e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void onUnexpectedError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } }); } }
vector/src/main/java/im/vector/fragments/VectorSearchRoomFilesListFragment.java
/* * Copyright 2016 OpenMarket Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package im.vector.fragments; import android.os.Bundle; import android.text.TextUtils; import android.util.Log; import android.view.View; import android.widget.Toast; import org.matrix.androidsdk.adapters.MessageRow; import org.matrix.androidsdk.data.RoomState; import org.matrix.androidsdk.rest.callback.ApiCallback; import org.matrix.androidsdk.rest.model.Event; import org.matrix.androidsdk.rest.model.MatrixError; import org.matrix.androidsdk.rest.model.Message; import org.matrix.androidsdk.rest.model.TokensChunkResponse; import org.matrix.androidsdk.util.JsonUtils; import java.util.ArrayList; import java.util.Collections; import java.util.List; import im.vector.activity.VectorBaseSearchActivity; public class VectorSearchRoomFilesListFragment extends VectorSearchRoomsFilesListFragment { static final int MESSAGES_PAGINATION_LIMIT = 50; static final String LOG_TAG = "SearchRoomFilesListFrag"; // set to false when there is no more available message in the room history private boolean mCanPaginateBack = true; /** * static constructor * @param matrixId the session Id. * @param layoutResId the used layout. 
*/ public static VectorSearchRoomFilesListFragment newInstance(String matrixId, String roomId, int layoutResId) { VectorSearchRoomFilesListFragment frag = new VectorSearchRoomFilesListFragment(); Bundle args = new Bundle(); args.putInt(ARG_LAYOUT_ID, layoutResId); args.putString(ARG_MATRIX_ID, matrixId); if (null != roomId) { args.putString(ARG_ROOM_ID, roomId); } frag.setArguments(args); return frag; } /** * Cancel the catching requests. */ public void cancelCatchingRequests() { super.cancelCatchingRequests(); mIsBackPaginating = false; mCanPaginateBack = true; mRoom.cancelRemoteHistoryRequest(); mNextBatch = mRoom.getLiveState().getToken(); } @Override public void onPause() { super.onPause(); // Fix memory leak: VectorRoomDetailsActivity() instances leak cancelCatchingRequests(); } @Override public void onResume() { super.onResume(); if (getActivity() instanceof VectorBaseSearchActivity.IVectorSearchActivity) { ((VectorBaseSearchActivity.IVectorSearchActivity)getActivity()).refreshSearch(); } else { startFilesSearch(null); } } /** * Update the searched pattern. */ public void startFilesSearch(final OnSearchResultListener onSearchResultListener) { // please wait if (mIsBackPaginating) { return; } // add the listener to list to warn when the search is done. 
if (null != onSearchResultListener) { mSearchListeners.add(onSearchResultListener); } // will be called when resumed // onCreateView is not yet called if (null == mMessageListView) { return; } mIsBackPaginating = true; mMessageListView.setVisibility(View.GONE); remoteRoomHistoryRequest(new ArrayList<Event>(), new ApiCallback<ArrayList<Event>>() { @Override public void onSuccess(ArrayList<Event> eventsChunk) { ArrayList<MessageRow> messageRows = new ArrayList<>(eventsChunk.size()); RoomState liveState = mRoom.getLiveState(); for (Event event : eventsChunk) { messageRows.add(new MessageRow(event, liveState)); } Collections.reverse(messageRows); mAdapter.clear(); mAdapter.addAll(messageRows); mMessageListView.setAdapter(mAdapter); mMessageListView.setOnScrollListener(mScrollListener); // scroll to the bottom scrollToBottom(); mMessageListView.setVisibility(View.VISIBLE); for(OnSearchResultListener listener : mSearchListeners) { try { listener.onSearchSucceed(messageRows.size()); } catch (Exception e) { Log.e(LOG_TAG, "## remoteRoomHistoryRequest() : onSearchSucceed failed " + e.getMessage()); } } mIsBackPaginating = false; mSearchListeners.clear(); } private void onError() { mMessageListView.setVisibility(View.GONE); // clear the results list if teh search fails mAdapter.clear(); for(OnSearchResultListener listener : mSearchListeners) { try { listener.onSearchFailed(); } catch (Exception e) { Log.e(LOG_TAG, "## remoteRoomHistoryRequest() : onSearchFailed failed " + e.getMessage()); } } mIsBackPaginating = false; mSearchListeners.clear(); } @Override public void onNetworkError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void onMatrixError(MatrixError e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void onUnexpectedError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); 
onError(); } }); } /** * Search the pattern on a pagination server side. */ public void backPaginate() { // please wait if (mIsBackPaginating || !mCanPaginateBack) { return; } mIsBackPaginating = true; final int firstPos = mMessageListView.getFirstVisiblePosition(); final int countBeforeUpdate = mAdapter.getCount(); showLoadingBackProgress(); remoteRoomHistoryRequest(new ArrayList<Event>(), new ApiCallback<ArrayList<Event>>() { @Override public void onSuccess(final ArrayList<Event> eventChunks) { VectorSearchRoomFilesListFragment.this.getActivity().runOnUiThread(new Runnable() { @Override public void run() { // is there any result to display if (0 != eventChunks.size()) { mAdapter.setNotifyOnChange(false); for (Event event : eventChunks) { MessageRow row = new MessageRow(event, mRoom.getLiveState()); mAdapter.insert(row, 0); } // Scroll the list down to where it was before adding rows to the top mUiHandler.post(new Runnable() { @Override public void run() { // refresh the list only at the end of the sync // else the one by one message refresh gives a weird UX // The application is almost frozen during the mAdapter.notifyDataSetChanged(); // do not use count because some messages are not displayed // so we compute the new pos mMessageListView.setSelection(firstPos + (mAdapter.getCount() - countBeforeUpdate)); mIsBackPaginating = false; } }); } else { mIsBackPaginating = false; } VectorSearchRoomFilesListFragment.this.hideLoadingBackProgress(); } }); } private void onError() { mIsBackPaginating = false; VectorSearchRoomFilesListFragment.this.hideLoadingBackProgress(); } // the request will be auto restarted when a valid network will be found @Override public void onNetworkError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void onMatrixError(MatrixError e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void 
onUnexpectedError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } }); } /** * Filter and append the found events * @param events the matched events list * @param eventsToAppend the retrieved events list. */ private void appendEvents(ArrayList<Event> events, List<Event> eventsToAppend) { // filter ArrayList<Event> filteredEvents = new ArrayList<>(eventsToAppend.size()); for(Event event : eventsToAppend) { if (Event.EVENT_TYPE_MESSAGE.equals(event.type)) { Message message = JsonUtils.toMessage(event.content); if (Message.MSGTYPE_FILE.equals(message.msgtype) || Message.MSGTYPE_IMAGE.equals(message.msgtype) || Message.MSGTYPE_VIDEO.equals(message.msgtype)) { filteredEvents.add(event); } } } events.addAll(filteredEvents); } /** * Search some files until find out at least 10 matching messages. * @param events the result events lists * @param callback the result callback */ private void remoteRoomHistoryRequest(final ArrayList<Event> events, final ApiCallback<ArrayList<Event>> callback) { mRoom.requestServerRoomHistory(mNextBatch, MESSAGES_PAGINATION_LIMIT, new ApiCallback<TokensChunkResponse<Event>>() { @Override public void onSuccess(TokensChunkResponse<Event> eventsChunk) { if ((null == mNextBatch) || TextUtils.equals(eventsChunk.start, mNextBatch)) { // no more message in the history if (0 == eventsChunk.chunk.size()) { mCanPaginateBack = false; callback.onSuccess(events); } else { // append the retrieved one appendEvents(events, eventsChunk.chunk); mNextBatch = eventsChunk.end; if (events.size() >= 10) { callback.onSuccess(events); } else { remoteRoomHistoryRequest(events, callback); } } } } private void onError() { callback.onSuccess(events); } @Override public void onNetworkError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } @Override public void onMatrixError(MatrixError e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), 
Toast.LENGTH_LONG).show(); onError(); } @Override public void onUnexpectedError(Exception e) { Toast.makeText(getActivity(), e.getLocalizedMessage(), Toast.LENGTH_LONG).show(); onError(); } }); } }
Room files list : the back pagination was also broken.
vector/src/main/java/im/vector/fragments/VectorSearchRoomFilesListFragment.java
Room files list : the back pagination was also broken.
<ide><path>ector/src/main/java/im/vector/fragments/VectorSearchRoomFilesListFragment.java <ide> /** <ide> * Search the pattern on a pagination server side. <ide> */ <del> public void backPaginate() { <add> @Override <add> public void backPaginate(boolean fillHistory) { <ide> // please wait <ide> if (mIsBackPaginating || !mCanPaginateBack) { <ide> return;
Java
apache-2.0
a4fbf3b0d0881db565ddb81cfd5d770857bc6d03
0
da1z/intellij-community,dslomov/intellij-community,fitermay/intellij-community,samthor/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,fnouama/intellij-community,semonte/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,holmes/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,gnuhub/intellij-community,caot/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,allotria/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,izonder/intellij-community,samthor/intellij-community,vvv1559/intellij-community,izonder/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,kdwink/intellij-community,hurricup/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,gnuhub/intellij-community,signed/intellij-community,Distrotech/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,signed/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,diorcety/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,fitermay/intellij-community,Lekanic
h/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,amith01994/intellij-community,supersven/intellij-community,slisson/intellij-community,diorcety/intellij-community,clumsy/intellij-community,FHannes/intellij-community,signed/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,FHannes/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,jexp/idea2,suncycheng/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,holmes/intellij-community,ernestp/consulo,holmes/intellij-community,semonte/intellij-community,retomerz/intellij-community,clumsy/intellij-community,asedunov/intellij-community,ibinti/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,clumsy/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,hurricup/intellij-community,kool79/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,samthor/intell
ij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,da1z/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,robovm/robovm-studio,diorcety/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,joewalnes/idea-community,fitermay/intellij-community,amith01994/intellij-community,xfournet/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,blademainer/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,suncycheng/intellij-community,allotria/intellij-community,semonte/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,caot/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,amith01994/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,jexp/idea2,kdwink/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,jexp/idea2,ahb0327/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-com
munity,suncycheng/intellij-community,da1z/intellij-community,holmes/intellij-community,diorcety/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,retomerz/intellij-community,kdwink/intellij-community,FHannes/intellij-community,asedunov/intellij-community,kool79/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,adedayo/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,allotria/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,vvv1559/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,robovm/robovm-studio,xfournet/intellij-community,apixandru/intellij-community,signed/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,da1z/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,joewalnes/idea-community,clumsy/intellij-community,xfournet/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,kool79/intellij-community,fitermay/intellij-community,ibinti/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,nicolargo/int
ellij-community,hurricup/intellij-community,fitermay/intellij-community,ol-loginov/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,signed/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,semonte/intellij-community,kdwink/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,supersven/intellij-community,asedunov/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,samthor/intellij-community,semonte/intellij-community,supersven/intellij-community,ahb0327/intellij-community,supersven/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,ernestp/consulo,retomerz/intellij-community,samthor/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,caot/intellij-community,jagguli/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,asedunov/intellij-community,izonder/intellij-community,consulo/consulo,tmpgit/intellij-community,michaelgallacher/intellij-community
,lucafavatella/intellij-community,dslomov/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,samthor/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,joewalnes/idea-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,signed/intellij-community,joewalnes/idea-community,amith01994/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,diorcety/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,youdonghai/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,slisson/intellij-community,petteyg/intellij-community,da1z/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,consulo/consulo,hurricup/intellij-community,robovm/robovm-studio,ernestp/consulo,joewalnes/idea-community,kool79/intellij-community,TangHao198
7/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,ibinti/intellij-community,xfournet/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,caot/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,caot/intellij-community,joewalnes/idea-community,ftomassetti/intellij-community,slisson/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,slisson/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,jexp/idea2,ahb0327/intellij-community,consulo/consulo,samthor/intellij-community,fitermay/intellij-community,consulo/consulo,jexp/idea2,idea4bsd/idea4bsd,ibinti/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,ibinti/intellij-community,robovm/robovm-studio,adedayo/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,slisson/intellij-community,Lekanich/intellij-community,kool79/intellij-community,supersven/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,slisson/intellij-comm
unity,fitermay/intellij-community,jagguli/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,supersven/intellij-community,apixandru/intellij-community,apixandru/intellij-community,adedayo/intellij-community,vladmm/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,blademainer/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,kdwink/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,asedunov/intellij-community,joewalnes/idea-community,TangHao1987/intellij-community,holmes/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,signed/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,Lekanich/inte
llij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,joewalnes/idea-community,consulo/consulo,xfournet/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,ernestp/consulo,holmes/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,clumsy/intellij-community,clumsy/intellij-community,dslomov/intellij-community,apixandru/intellij-community,izonder/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,jexp/idea2,alphafoobar/intellij-community,retomerz/intellij-community,vladmm/intellij-community,FHannes/intellij-community,supersven/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,joewalnes/idea-community,semonte/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,adedayo/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,holmes/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,kdwink/intellij-community,caot/intellij-community,kool79/intellij-community,jagguli/intellij-community,fnouama/intellij-community,holmes/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,nicolargo/intellij-community,nicolargo/intellij-community,blademainer/intelli
j-community,michaelgallacher/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,asedunov/intellij-community,signed/intellij-community,fitermay/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,ryano144/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,ernestp/consulo,fengbaicanhe/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,signed/intellij-community,semonte/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,dslomov/intellij-community,caot/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,allotria/intellij-community,ol-loginov/intellij-community,signed/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,jexp/idea2,muntasirsyed/intellij-community,idea4bsd/idea4bsd,jexp/idea2,mglukhikh/intellij-community,apixandru/intellij-community,petteyg/intellij-community,signed/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,consulo/consulo,ryano144/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,izonder/intellij-community,FHannes/intellij-community,da1z/intellij-community,izonder/intellij-community,jagguli/intellij-community,petteyg/intellij-community,retomerz/intellij-community,diorcety/inte
llij-community,hurricup/intellij-community,holmes/intellij-community,fitermay/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,ernestp/consulo,holmes/intellij-community,dslomov/intellij-community,kool79/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,robovm/robovm-studio,izonder/intellij-community,retomerz/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,slisson/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,clumsy/intellij-community,allotria/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,dslomov/intellij-community,Distrotech/intellij-community
/* * Copyright (c) 2005 JetBrains s.r.o. All Rights Reserved. */ package com.intellij.util.io; import org.jetbrains.annotations.NonNls; import java.io.*; import java.nio.ByteBuffer; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; /** * @author max */ public class MappedFile { private ByteBufferUtil.ByteBufferHolder myHolder; private final File myFile; private long myRealSize; private long mySize; private long myPosition; @NonNls private static final String UTF_8_CHARSET_NAME = "UTF-8"; @NonNls private static final String RW = "rw"; private byte[] buffer = new byte[8]; public MappedFile(File file, int initialSize) throws IOException { myFile = file; myPosition = 0; map(); mySize = myRealSize; if (mySize == 0) { resize(initialSize); } } private void map() throws IOException { RandomAccessFile raf = new RandomAccessFile(myFile, RW); final FileChannel channel = raf.getChannel(); MappedByteBuffer buf = null; try { buf = channel.map(FileChannel.MapMode.READ_WRITE, 0, raf.length()); } catch (IOException e) { new RuntimeException("Mapping failed: " + myFile.getAbsolutePath(), e); } finally { channel.close(); raf.close(); } if (buf == null) { new RuntimeException("Mapping failed: " + myFile.getAbsolutePath()); } myHolder = new ByteBufferUtil.ByteBufferHolder(buf, myFile); myRealSize = myFile.length(); buf.position((int)myPosition); ByteBufferUtil.TOTAL_MAPPED_BYTES += myRealSize; } public short getShort(int index) throws IOException { seek(index); return readShort(); } public short readShort() throws IOException { get(buffer, 0, 2); int ch1 = buffer[0] & 0xff; int ch2 = buffer[1] & 0xff; return (short)((ch1 << 8) + ch2); } public void putShort(int index, short value) throws IOException { seek(index); writeShort(value); } public void writeShort(int value) throws IOException { buffer[0] = (byte)((value >>> 8) & 0xFF); buffer[1] = (byte)(value & 0xFF); put(buffer, 0, 2); } public int getInt(int index) throws IOException { seek(index); return readInt(); 
} public void putInt(int index, int value) throws IOException { seek(index); writeInt(value); } public byte get(int index) throws IOException { seek(index); return readByte(); } public void put(int index, byte value) throws IOException { seek(index); writeByte(value); } public void get(int index, byte[] dst, int offset, int length) throws IOException { seek(index); get(dst, offset, length); } public void get(final byte[] dst, final int offset, final int length) throws IOException { if (myPosition + length > mySize) { throw new EOFException(); } myHolder.getBuffer().get(dst, offset, length); myPosition += length; } public void put(int index, byte[] src, int offset, int length) throws IOException { seek(index); put(src, offset, length); } public void seek(long pos) throws IOException { ensureSize(pos); myHolder.getBuffer().position((int)pos); myPosition = pos; if (pos > mySize) { mySize = pos; } } private void ensureSize(final long pos) throws IOException { while (pos >= myRealSize) { expand(); } } private void expand() throws IOException { resize((int)((myRealSize + 1) * 13) >> 3); } public void put(final byte[] src, final int offset, final int length) throws IOException { ensureSize(myPosition + length); myHolder.getBuffer().put(src, offset, length); myPosition += length; if (myPosition > mySize) { mySize = myPosition; } } public void flush() { final ByteBuffer buf = myHolder.getBuffer(); if (buf instanceof MappedByteBuffer) { ((MappedByteBuffer)buf).force(); } } public void close() { unmap(); try { RandomAccessFile raf = new RandomAccessFile(myFile, RW); raf.setLength(mySize); raf.close(); } catch (IOException e) { throw new RuntimeException(e); } } public void resize(int size) throws IOException { final int current = (int)myRealSize; if (current == size) return; unmap(); RandomAccessFile raf = new RandomAccessFile(myFile, RW); raf.setLength(size); raf.close(); map(); } public final long length() { return mySize; } public long getFilePointer() { return myPosition; 
} public int readInt() throws IOException { get(buffer, 0, 4); int ch1 = buffer[0] & 0xff; int ch2 = buffer[1] & 0xff; int ch3 = buffer[2] & 0xff; int ch4 = buffer[3] & 0xff; return ((ch1 << 24) + (ch2 << 16) + (ch3 << 8) + ch4); } public void writeInt(int value) throws IOException { buffer[0] = (byte)((value >>> 24) & 0xFF); buffer[1] = (byte)((value >>> 16) & 0xFF); buffer[2] = (byte)((value >>> 8) & 0xFF); buffer[3] = (byte)(value & 0xFF); put(buffer, 0, 4); } public String readUTF() throws IOException { try { int len = readInt(); byte[] bytes = new byte[ len ]; get(bytes, 0, len); return new String(bytes, UTF_8_CHARSET_NAME); } catch (UnsupportedEncodingException e) { // Can't be return ""; } } public void writeUTF(String value) throws IOException { try { final byte[] bytes = value.getBytes(UTF_8_CHARSET_NAME); writeInt(bytes.length); put(bytes, 0, bytes.length); } catch (UnsupportedEncodingException e) { // Can't be } } public int readUnsignedShort() throws IOException { get(buffer, 0, 2); int ch1 = buffer[0] & 0xff; int ch2 = buffer[1] & 0xff; return (ch1 << 8) + ch2; } public char readChar() throws IOException { return (char)readUnsignedShort(); } public void writeChar(char value) throws IOException { writeShort(value); } public byte readByte() throws IOException { get(buffer, 0, 1); return buffer[0]; } public void writeByte(byte value) throws IOException { buffer[0] = value; put(buffer, 0, 1); } private void unmap() { if (myHolder != null) { flush(); ByteBufferUtil.unmapMappedByteBuffer(myHolder); } } }
util/src/com/intellij/util/io/MappedFile.java
/* * Copyright (c) 2005 JetBrains s.r.o. All Rights Reserved. */ package com.intellij.util.io; import org.jetbrains.annotations.NonNls; import java.io.*; import java.nio.ByteBuffer; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; /** * @author max */ public class MappedFile { private ByteBufferUtil.ByteBufferHolder myHolder; private final File myFile; private long myRealSize; private long mySize; private long myPosition; @NonNls private static final String UTF_8_CHARSET_NAME = "UTF-8"; @NonNls private static final String RW = "rw"; private byte[] buffer = new byte[8]; public MappedFile(File file, int initialSize) throws IOException { myFile = file; myPosition = 0; map(); mySize = myRealSize; if (mySize == 0) { resize(initialSize); } } private void map() throws IOException { RandomAccessFile raf = new RandomAccessFile(myFile, RW); final FileChannel channel = raf.getChannel(); MappedByteBuffer buf = null; try { buf = channel.map(FileChannel.MapMode.READ_WRITE, 0, raf.length()); } catch (IOException e) { new RuntimeException("Mapping failed: " + myFile.getAbsolutePath(), e); } finally { channel.close(); raf.close(); } if (buf == null) { new RuntimeException("Mapping failed: " + myFile.getAbsolutePath()); } myHolder = new ByteBufferUtil.ByteBufferHolder(buf, myFile); myRealSize = myFile.length(); buf.position((int)myPosition); ByteBufferUtil.TOTAL_MAPPED_BYTES += myRealSize; } public short getShort(int index) throws IOException { seek(index); return readShort(); } public short readShort() throws IOException { get(buffer, 0, 2); int ch1 = buffer[0] & 0xff; int ch2 = buffer[1] & 0xff; return (short)((ch1 << 8) + ch2); } public void putShort(int index, short value) throws IOException { seek(index); writeShort(value); } public void writeShort(int value) throws IOException { buffer[0] = (byte)((value >>> 8) & 0xFF); buffer[1] = (byte)(value & 0xFF); put(buffer, 0, 2); } public int getInt(int index) throws IOException { seek(index); return readInt(); 
} public void putInt(int index, int value) throws IOException { seek(index); writeInt(value); } public byte get(int index) throws IOException { seek(index); return readByte(); } public void put(int index, byte value) throws IOException { seek(index); writeByte(value); } public void get(int index, byte[] dst, int offset, int length) throws IOException { seek(index); get(dst, offset, length); } public void get(final byte[] dst, final int offset, final int length) throws IOException { if (myPosition + length > mySize) { throw new EOFException(); } myHolder.getBuffer().get(dst, offset, length); myPosition += length; } public void put(int index, byte[] src, int offset, int length) throws IOException { seek(index); put(src, offset, length); } public void seek(long pos) throws IOException { ensureSize(pos); myHolder.getBuffer().position((int)pos); myPosition = pos; if (pos > mySize) { mySize = pos; } } private void ensureSize(final long pos) throws IOException { while (pos >= myRealSize) { expand(); } } private void expand() throws IOException { resize((int)(myRealSize + Math.max(myRealSize, 1024 * 32))); } public void put(final byte[] src, final int offset, final int length) throws IOException { ensureSize(myPosition + length); myHolder.getBuffer().put(src, offset, length); myPosition += length; if (myPosition > mySize) { mySize = myPosition; } } public void flush() { final ByteBuffer buf = myHolder.getBuffer(); if (buf instanceof MappedByteBuffer) { ((MappedByteBuffer)buf).force(); } } public void close() { unmap(); try { RandomAccessFile raf = new RandomAccessFile(myFile, RW); raf.setLength(mySize); raf.close(); } catch (IOException e) { throw new RuntimeException(e); } } public void resize(int size) throws IOException { final int current = (int)myRealSize; if (current == size) return; unmap(); RandomAccessFile raf = new RandomAccessFile(myFile, RW); raf.setLength(size); raf.close(); map(); } public final long length() { return mySize; } public long getFilePointer() { 
return myPosition; } public int readInt() throws IOException { get(buffer, 0, 4); int ch1 = buffer[0] & 0xff; int ch2 = buffer[1] & 0xff; int ch3 = buffer[2] & 0xff; int ch4 = buffer[3] & 0xff; return ((ch1 << 24) + (ch2 << 16) + (ch3 << 8) + ch4); } public void writeInt(int value) throws IOException { buffer[0] = (byte)((value >>> 24) & 0xFF); buffer[1] = (byte)((value >>> 16) & 0xFF); buffer[2] = (byte)((value >>> 8) & 0xFF); buffer[3] = (byte)(value & 0xFF); put(buffer, 0, 4); } public String readUTF() throws IOException { try { int len = readInt(); byte[] bytes = new byte[ len ]; get(bytes, 0, len); return new String(bytes, UTF_8_CHARSET_NAME); } catch (UnsupportedEncodingException e) { // Can't be return ""; } } public void writeUTF(String value) throws IOException { try { final byte[] bytes = value.getBytes(UTF_8_CHARSET_NAME); writeInt(bytes.length); put(bytes, 0, bytes.length); } catch (UnsupportedEncodingException e) { // Can't be } } public int readUnsignedShort() throws IOException { get(buffer, 0, 2); int ch1 = buffer[0] & 0xff; int ch2 = buffer[1] & 0xff; return (ch1 << 8) + ch2; } public char readChar() throws IOException { return (char)readUnsignedShort(); } public void writeChar(char value) throws IOException { writeShort(value); } public byte readByte() throws IOException { get(buffer, 0, 1); return buffer[0]; } public void writeByte(byte value) throws IOException { buffer[0] = value; put(buffer, 0, 1); } private void unmap() { if (myHolder != null) { flush(); ByteBufferUtil.unmapMappedByteBuffer(myHolder); } } }
smarter expand strategy (mulpiply size on the golden section approximation)
util/src/com/intellij/util/io/MappedFile.java
smarter expand strategy (mulpiply size on the golden section approximation)
<ide><path>til/src/com/intellij/util/io/MappedFile.java <ide> } <ide> <ide> private void expand() throws IOException { <del> resize((int)(myRealSize + Math.max(myRealSize, 1024 * 32))); <add> resize((int)((myRealSize + 1) * 13) >> 3); <ide> } <ide> <ide> public void put(final byte[] src, final int offset, final int length) throws IOException {
Java
apache-2.0
2efa77541fd2444281c36773f5a0899483fbc6a2
0
clodoaldoBasaglia/scheduler
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package sheduler.consumidor; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Queue; import java.util.logging.Level; import java.util.logging.Logger; /** * * @author personal */ public final class PConfig { //Inicio da area de Configuração //Host do Postgre private String pHost = "localhost"; //Porta do Postgre private Integer pPort = 5432; //Banco de dados alvo private String pDb = "teste"; //Usuário do Postgre private String pUser = "postgres"; //Senha do Postgre private String pPw = "supersix123"; //Tabela da schedule private String tableSchedule = "schedule"; //Sql para obter Tupla private String sqlGetRow = "SELECT * FROM " + tableSchedule + " WHERE idoperacao = ?"; //Sql para deletar Tupla private String sqlDeleteRow = "DELETE FROM " + tableSchedule + " WHERE idoperacao = ?"; //Fim da area de Configuração String url = "jdbc:postgresql://" + pHost + ":" + pPort + "/" + pDb; Connection conn; public Connection startConnection() { try { Class.forName("org.postgresql.Driver"); conn = DriverManager.getConnection(url, pUser, pPw); if (!conn.isClosed()) { System.out.println("conexão realizada"); Runnable codigoThread = new Runnable() { @Override public void run() { Statement stm; try { stm = conn.createStatement(); ResultSet pRs = stm.executeQuery("SELECT * FROM schedule;"); diferenciador(pRs); } catch (SQLException ex) { Logger.getLogger(PConfig.class.getName()).log(Level.SEVERE, null, ex); } } private void diferenciador(ResultSet pRs) { try { while (pRs.next()) { System.out.println(pRs.getArray("idoperacao")); } } catch (SQLException ex) { 
Logger.getLogger(PConfig.class.getName()).log(Level.SEVERE, null, ex); } } }; Thread tLeitura = new Thread(codigoThread); tLeitura.start(); /*Statement stm = conn.createStatement(); ResultSet pRs = stm.executeQuery("SELECT nome, idade, pai\n" + " FROM public.cadastro;"); while(pRs.next()) { System.out.println(pRs.getArray(1) + " " + pRs.getInt(2) + " " + pRs.getArray(3)); } pRs.close(); conn.close();*/ return conn; } } catch (Exception e) { e.printStackTrace(); } return null; } public Integer stopConnection(Connection conn) { try { conn.close(); return 1; } catch (Exception e) { e.printStackTrace(); } return 0; } //Inicia Prepared Statement private PreparedStatement initPreparedStatement(Connection conn, String sql) { try { PreparedStatement stmt = conn.prepareStatement(sql); return stmt; } catch (Exception e) { e.printStackTrace(); return null; } } //Se tem Próxima Tupla private Boolean hasNextRow(Connection conn, Integer i) { try { PreparedStatement stmt = initPreparedStatement(conn, sqlGetRow); stmt.setInt(1, i); ResultSet pRs = stmt.executeQuery(); if (pRs.next()) { return true; } } catch (Exception e) { e.printStackTrace(); } return false; } //Executa a query e retorna um Schedule, com dados da tupla i private Schedule getRow(Connection conn, Integer i) { try { Schedule sched = new Schedule(); PreparedStatement stmt = initPreparedStatement(conn, sqlGetRow); stmt.setInt(1, i); ResultSet pRs = stmt.executeQuery(); if (pRs.next()) { sched.setIdOperacao(pRs.getInt("idoperacao")); sched.setIndiceTransacao(pRs.getInt("indicetransacao")); sched.setOperacao(pRs.getString("operacao")); sched.setItemDado(pRs.getString("itemdado")); sched.setTimeStampJ(pRs.getTimestamp("timestampj")); pRs.close(); stmt.close(); return sched; } else { pRs.close(); stmt.close(); return null; } } catch (Exception e) { e.printStackTrace(); } return null; } //Deleta a tupla i private Integer deleteRow(Connection conn, Integer i) { try { PreparedStatement stmt = initPreparedStatement(conn, 
sqlDeleteRow); stmt.setInt(1, i); if (stmt.executeUpdate() == 1) { stmt.close(); return 1; } else { return 0; } } catch (Exception e) { e.printStackTrace(); } return 0; } //Verifica se o dado já está na lista de lock private Boolean isInLock(Schedule sched, List<ItemDadoLock> list) { int i = 0; while (i < list.size()) { if (list.get(i).getItemDado().equals(sched.getItemDado())) { return true; } i++; } return false; } private Integer indexOfLock(Schedule sched, List<ItemDadoLock> list) { int i = 0; while (i < list.size()) { if (list.get(i).getItemDado().equals(sched.getItemDado())) { return i; } i++; } return 0; } //Execução do Bloqueio compartilhado //Semelhante ao slide 02 pag 10 private void execLockS(Schedule sched, List<ItemDadoLock> lock, List startSched, List commitSched, List sharedLock, List exclusiveLock, Queue aborts) { //Pega indice do ItemDado na lista lock int i = indexOfLock(sched, lock); if (lock.get(i).equals('U')) { sharedLock.add(lock.get(i)); lock.get(i).setLock('S'); } else if (lock.get(i).equals('S')) { sharedLock.add(lock.get(i)); } else if (lock.get(i).equals('X')) { exclusiveLock.add(lock.get(i)); } } //Método para executar as schedules private void execSchedule(Schedule sched, List<ItemDadoLock> lock, List startSched, List commitSched, List sharedLock, List exclusiveLock, Queue aborts) { //Se a operação é S, adiciona na lista de iniciados if (sched.getOperacao().equals("S")) { startSched.add(sched); } //Se não verifica e termina a Schedule else if (sched.getOperacao().equals("E")) { //verificarETerminarSchedule(); } else { //Se não tiver o dado na lista de Lock, adiciona como Unlocked if (!(isInLock(sched, lock))) { ItemDadoLock temp = new ItemDadoLock(); temp.setItemDado(sched.getItemDado()); temp.setLock('U'); } else { if (sched.getOperacao().equals("R")) { execLockS(sched, lock, startSched, commitSched, sharedLock, exclusiveLock, aborts); } } } } public PConfig() { //Cria conexão e inicia Connection conn = startConnection(); //Lista de 
bloqueios List lock = new ArrayList<ItemDadoLock>(); //Cria as filas de bloqueio compartilhado, exclusivo e abortados List startSched = new ArrayList<Schedule>(); List commitSched = new ArrayList<Schedule>(); List sharedLock = new ArrayList<Schedule>(); List exclusiveLock = new ArrayList<Schedule>(); Queue aborts = new LinkedList(); int i = 0; while (true) { if (hasNextRow(conn, i)) { Schedule sched = getRow(conn, i); //execSchedule(sched,lock,startSched,commitSched,sharedLock,exclusiveLock,aborts); //deleteRow(conn, i); System.out.print("foi"); System.out.println(sched); } //Fazer else if pra quando apertar enter cancelar o consumidor //E fazer espera else { break; } //i++; } //Termina conexão if (stopConnection(conn) == 1) { System.out.println("Conexão encerrada"); } else { System.out.println("Conexão não encerrada"); } } }
sheduler/src/sheduler/consumidor/PConfig.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package sheduler.consumidor; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Queue; import java.util.logging.Level; import java.util.logging.Logger; /** * * @author personal */ public final class PConfig { //Inicio da area de Configuração //Host do Postgre private String pHost = "localhost"; //Porta do Postgre private Integer pPort = 5432; //Banco de dados alvo private String pDb = "teste"; //Usuário do Postgre private String pUser = "postgres"; //Senha do Postgre private String pPw = "supersix123"; //Tabela da schedule private String tableSchedule = "schedule"; //Sql para obter Tupla private String sqlGetRow = "SELECT * FROM " + tableSchedule + " WHERE idoperacao = ?"; //Sql para deletar Tupla private String sqlDeleteRow = "DELETE FROM " + tableSchedule + " WHERE idoperacao = ?"; //Fim da area de Configuração String url = "jdbc:postgresql://" + pHost + ":" + pPort + "/" + pDb; Connection conn; public Connection startConnection() { try { Class.forName("org.postgresql.Driver"); conn = DriverManager.getConnection(url, pUser, pPw); if (!conn.isClosed()) { System.out.println("conexão realizada"); Runnable codigoThread = new Runnable() { @Override public void run() { Statement stm; try { stm = conn.createStatement(); ResultSet pRs = stm.executeQuery("SELECT * FROM schedule;"); while (pRs.next()) { System.out.println(pRs.getArray("idoperacao")); } } catch (SQLException ex) { Logger.getLogger(PConfig.class.getName()).log(Level.SEVERE, null, ex); } } }; Thread tLeitura = new Thread(codigoThread); tLeitura.start(); /*Statement stm = conn.createStatement(); ResultSet pRs = 
stm.executeQuery("SELECT nome, idade, pai\n" + " FROM public.cadastro;"); while(pRs.next()) { System.out.println(pRs.getArray(1) + " " + pRs.getInt(2) + " " + pRs.getArray(3)); } pRs.close(); conn.close();*/ return conn; } } catch (Exception e) { e.printStackTrace(); } return null; } public Integer stopConnection(Connection conn) { try { conn.close(); return 1; } catch (Exception e) { e.printStackTrace(); } return 0; } //Inicia Prepared Statement private PreparedStatement initPreparedStatement(Connection conn, String sql) { try { PreparedStatement stmt = conn.prepareStatement(sql); return stmt; } catch (Exception e) { e.printStackTrace(); return null; } } //Se tem Próxima Tupla private Boolean hasNextRow(Connection conn, Integer i) { try { PreparedStatement stmt = initPreparedStatement(conn, sqlGetRow); stmt.setInt(1, i); ResultSet pRs = stmt.executeQuery(); if (pRs.next()) { return true; } } catch (Exception e) { e.printStackTrace(); } return false; } //Executa a query e retorna um Schedule, com dados da tupla i private Schedule getRow(Connection conn, Integer i) { try { Schedule sched = new Schedule(); PreparedStatement stmt = initPreparedStatement(conn, sqlGetRow); stmt.setInt(1, i); ResultSet pRs = stmt.executeQuery(); if (pRs.next()) { sched.setIdOperacao(pRs.getInt("idoperacao")); sched.setIndiceTransacao(pRs.getInt("indicetransacao")); sched.setOperacao(pRs.getString("operacao")); sched.setItemDado(pRs.getString("itemdado")); sched.setTimeStampJ(pRs.getTimestamp("timestampj")); pRs.close(); stmt.close(); return sched; } else { pRs.close(); stmt.close(); return null; } } catch (Exception e) { e.printStackTrace(); } return null; } //Deleta a tupla i private Integer deleteRow(Connection conn, Integer i) { try { PreparedStatement stmt = initPreparedStatement(conn, sqlDeleteRow); stmt.setInt(1, i); if (stmt.executeUpdate() == 1) { stmt.close(); return 1; } else { return 0; } } catch (Exception e) { e.printStackTrace(); } return 0; } //Verifica se o dado já está 
na lista de lock private Boolean isInLock(Schedule sched, List<ItemDadoLock> list) { int i = 0; while (i < list.size()) { if (list.get(i).getItemDado().equals(sched.getItemDado())) { return true; } i++; } return false; } private Integer indexOfLock(Schedule sched, List<ItemDadoLock> list) { int i = 0; while (i < list.size()) { if (list.get(i).getItemDado().equals(sched.getItemDado())) { return i; } i++; } return 0; } //Execução do Bloqueio compartilhado //Semelhante ao slide 02 pag 10 private void execLockS(Schedule sched, List<ItemDadoLock> lock, List startSched, List commitSched, List sharedLock, List exclusiveLock, Queue aborts) { //Pega indice do ItemDado na lista lock int i = indexOfLock(sched, lock); if (lock.get(i).equals('U')) { sharedLock.add(lock.get(i)); lock.get(i).setLock('S'); } else if (lock.get(i).equals('S')) { sharedLock.add(lock.get(i)); } else if (lock.get(i).equals('X')) { exclusiveLock.add(lock.get(i)); } } //Método para executar as schedules private void execSchedule(Schedule sched, List<ItemDadoLock> lock, List startSched, List commitSched, List sharedLock, List exclusiveLock, Queue aborts) { //Se a operação é S, adiciona na lista de iniciados if (sched.getOperacao().equals("S")) { startSched.add(sched); } //Se não verifica e termina a Schedule else if (sched.getOperacao().equals("E")) { //verificarETerminarSchedule(); } else { //Se não tiver o dado na lista de Lock, adiciona como Unlocked if (!(isInLock(sched, lock))) { ItemDadoLock temp = new ItemDadoLock(); temp.setItemDado(sched.getItemDado()); temp.setLock('U'); } else { if (sched.getOperacao().equals("R")) { execLockS(sched, lock, startSched, commitSched, sharedLock, exclusiveLock, aborts); } } } } public PConfig() { //Cria conexão e inicia Connection conn = startConnection(); //Lista de bloqueios List lock = new ArrayList<ItemDadoLock>(); //Cria as filas de bloqueio compartilhado, exclusivo e abortados List startSched = new ArrayList<Schedule>(); List commitSched = new 
ArrayList<Schedule>(); List sharedLock = new ArrayList<Schedule>(); List exclusiveLock = new ArrayList<Schedule>(); Queue aborts = new LinkedList(); int i = 0; while (true) { if (hasNextRow(conn, i)) { Schedule sched = getRow(conn, i); //execSchedule(sched,lock,startSched,commitSched,sharedLock,exclusiveLock,aborts); //deleteRow(conn, i); System.out.print("foi"); System.out.println(sched); } //Fazer else if pra quando apertar enter cancelar o consumidor //E fazer espera else { break; } //i++; } //Termina conexão if (stopConnection(conn) == 1) { System.out.println("Conexão encerrada"); } else { System.out.println("Conexão não encerrada"); } } }
lendo dados;
sheduler/src/sheduler/consumidor/PConfig.java
lendo dados;
<ide><path>heduler/src/sheduler/consumidor/PConfig.java <ide> try { <ide> stm = conn.createStatement(); <ide> ResultSet pRs = stm.executeQuery("SELECT * FROM schedule;"); <add> diferenciador(pRs); <add> } catch (SQLException ex) { <add> Logger.getLogger(PConfig.class.getName()).log(Level.SEVERE, null, ex); <add> } <add> } <add> <add> private void diferenciador(ResultSet pRs) { <add> try { <ide> while (pRs.next()) { <ide> System.out.println(pRs.getArray("idoperacao")); <ide> } <ide> } <ide> return null; <ide> } <del> <del> <ide> <ide> public Integer stopConnection(Connection conn) { <ide> try {
JavaScript
mit
ebf9350530fd3b2d67169820953f4040eb6ebd1a
0
Flexberry/ember-flexberry-designer,Flexberry/ember-flexberry-designer,Flexberry/ember-flexberry-designer
import Ember from 'ember'; import ListFormController from 'ember-flexberry/controllers/list-form'; export default ListFormController.extend({ /** Name of related edit form route. @property editFormRoute @type String @default 'fd-generation-process-form' */ editFormRoute: 'fd-generation-process-form', /** Service that triggers objectlistview events. @property objectlistviewEventsService @type {Class} @default Ember.inject.service() */ objectlistviewEventsService: Ember.inject.service('objectlistview-events'), currentProjectContext: Ember.inject.service('fd-current-project-context'), generationService: Ember.inject.service('fd-generation'), /** Property to form array of special structures of custom user buttons. @property customButtons @type Array */ customButtons: Ember.computed('i18n.locale', function() { let i18n = this.get('i18n'); return [{ buttonName: i18n.t('forms.fd-generation-list-form.generation-button.caption'), buttonAction: 'generationStartButtonClick', buttonClasses: 'generation-start-button', buttonTitle: i18n.t('forms.fd-generation-list-form.generation-button.title') }]; }), actions: { /** Handler for click on generate button. 
@method actions.generationStartButtonClick */ generationStartButtonClick() { let _this = this; _this.get('objectlistviewEventsService').setLoadingState('loading'); let stagePk = _this.get('currentProjectContext').getCurrentStage(); let host = _this.get('store').adapterFor('application').host; Ember.$.ajax({ type: 'GET', xhrFields: { withCredentials: true }, url: `${host}/Generate(project=${stagePk})`, success(result) { _this.set('generationService.lastGenerationToken', result); result = result || {}; _this.get('objectlistviewEventsService').setLoadingState(''); _this.transitionToRoute(_this.get('editFormRoute'), Ember.get(result, 'value')); }, error() { _this.get('objectlistviewEventsService').setLoadingState(''); _this.set('error', new Error(_this.get('i18n').t('forms.fd-generation-process-form.connection-error-text'))); }, }); } }, /** Method to get type and attributes of a component, which will be embeded in object-list-view cell. @method getCellComponent. @param {Object} attr Attribute of projection property related to current table cell. @param {String} bindingPath Path to model property related to current table cell. @param {Object} modelClass Model class of data record related to current table row. @return {Object} Object containing name & properties of component, which will be used to render current table cell. { componentName: 'my-component', componentProperties: { ... } }. */ getCellComponent: function(attr, bindingPath) { if (bindingPath === 'startTime' || bindingPath === 'endTime') { return { componentName: 'object-list-view-cell', componentProperties: { dateFormat: 'DD.MM.YYYY, HH:mm:ss' } }; } return this._super(...arguments); }, });
addon/controllers/fd-generation-list-form.js
import Ember from 'ember'; import ListFormController from 'ember-flexberry/controllers/list-form'; export default ListFormController.extend({ /** Name of related edit form route. @property editFormRoute @type String @default 'fd-generation-process-form' */ editFormRoute: 'fd-generation-process-form', currentProjectContext: Ember.inject.service('fd-current-project-context'), generationService: Ember.inject.service('fd-generation'), /** Property to form array of special structures of custom user buttons. @property customButtons @type Array */ customButtons: Ember.computed('i18n.locale', function() { let i18n = this.get('i18n'); return [{ buttonName: i18n.t('forms.fd-generation-list-form.generation-button.caption'), buttonAction: 'generationStartButtonClick', buttonClasses: 'generation-start-button', buttonTitle: i18n.t('forms.fd-generation-list-form.generation-button.title') }]; }), actions: { /** Handler for click on generate button. @method actions.generationStartButtonClick */ generationStartButtonClick() { let _this = this; let stagePk = _this.get('currentProjectContext').getCurrentStage(); let host = _this.get('store').adapterFor('application').host; Ember.$.ajax({ type: 'GET', xhrFields: { withCredentials: true }, url: `${host}/Generate(project=${stagePk})`, success(result) { _this.set('generationService.lastGenerationToken', result); result = result || {}; _this.transitionToRoute(_this.get('editFormRoute'), Ember.get(result, 'value')); }, error() { }, }); } }, /** Method to get type and attributes of a component, which will be embeded in object-list-view cell. @method getCellComponent. @param {Object} attr Attribute of projection property related to current table cell. @param {String} bindingPath Path to model property related to current table cell. @param {Object} modelClass Model class of data record related to current table row. @return {Object} Object containing name & properties of component, which will be used to render current table cell. 
{ componentName: 'my-component', componentProperties: { ... } }. */ getCellComponent: function(attr, bindingPath) { if (bindingPath === 'startTime' || bindingPath === 'endTime') { return { componentName: 'object-list-view-cell', componentProperties: { dateFormat: 'DD.MM.YYYY, HH:mm:ss' } }; } return this._super(...arguments); }, });
Add loading in generation list
addon/controllers/fd-generation-list-form.js
Add loading in generation list
<ide><path>ddon/controllers/fd-generation-list-form.js <ide> @default 'fd-generation-process-form' <ide> */ <ide> editFormRoute: 'fd-generation-process-form', <add> <add> /** <add> Service that triggers objectlistview events. <add> <add> @property objectlistviewEventsService <add> @type {Class} <add> @default Ember.inject.service() <add> */ <add> objectlistviewEventsService: Ember.inject.service('objectlistview-events'), <ide> <ide> currentProjectContext: Ember.inject.service('fd-current-project-context'), <ide> <ide> */ <ide> generationStartButtonClick() { <ide> let _this = this; <add> _this.get('objectlistviewEventsService').setLoadingState('loading'); <ide> let stagePk = _this.get('currentProjectContext').getCurrentStage(); <ide> let host = _this.get('store').adapterFor('application').host; <ide> Ember.$.ajax({ <ide> success(result) { <ide> _this.set('generationService.lastGenerationToken', result); <ide> result = result || {}; <add> _this.get('objectlistviewEventsService').setLoadingState(''); <ide> _this.transitionToRoute(_this.get('editFormRoute'), Ember.get(result, 'value')); <ide> }, <ide> error() { <del> <add> _this.get('objectlistviewEventsService').setLoadingState(''); <add> _this.set('error', new Error(_this.get('i18n').t('forms.fd-generation-process-form.connection-error-text'))); <ide> }, <ide> }); <ide> }
Java
apache-2.0
259a021ae5d7a36f0ebbf1d798dc8251ed688dbe
0
noondaysun/sakai,rodriguezdevera/sakai,Fudan-University/sakai,udayg/sakai,surya-janani/sakai,whumph/sakai,liubo404/sakai,tl-its-umich-edu/sakai,lorenamgUMU/sakai,liubo404/sakai,noondaysun/sakai,conder/sakai,frasese/sakai,ktakacs/sakai,puramshetty/sakai,puramshetty/sakai,surya-janani/sakai,willkara/sakai,ouit0408/sakai,tl-its-umich-edu/sakai,bzhouduke123/sakai,clhedrick/sakai,colczr/sakai,wfuedu/sakai,buckett/sakai-gitflow,whumph/sakai,pushyamig/sakai,duke-compsci290-spring2016/sakai,willkara/sakai,puramshetty/sakai,wfuedu/sakai,puramshetty/sakai,kwedoff1/sakai,pushyamig/sakai,bkirschn/sakai,joserabal/sakai,OpenCollabZA/sakai,kingmook/sakai,udayg/sakai,clhedrick/sakai,duke-compsci290-spring2016/sakai,puramshetty/sakai,noondaysun/sakai,whumph/sakai,tl-its-umich-edu/sakai,kwedoff1/sakai,surya-janani/sakai,tl-its-umich-edu/sakai,bzhouduke123/sakai,OpenCollabZA/sakai,frasese/sakai,introp-software/sakai,Fudan-University/sakai,buckett/sakai-gitflow,udayg/sakai,kingmook/sakai,noondaysun/sakai,udayg/sakai,whumph/sakai,ouit0408/sakai,hackbuteer59/sakai,liubo404/sakai,ouit0408/sakai,joserabal/sakai,puramshetty/sakai,zqian/sakai,liubo404/sakai,bzhouduke123/sakai,tl-its-umich-edu/sakai,pushyamig/sakai,duke-compsci290-spring2016/sakai,ktakacs/sakai,bzhouduke123/sakai,pushyamig/sakai,ktakacs/sakai,bzhouduke123/sakai,colczr/sakai,frasese/sakai,clhedrick/sakai,rodriguezdevera/sakai,Fudan-University/sakai,Fudan-University/sakai,Fudan-University/sakai,zqian/sakai,ouit0408/sakai,colczr/sakai,pushyamig/sakai,ouit0408/sakai,surya-janani/sakai,colczr/sakai,introp-software/sakai,joserabal/sakai,OpenCollabZA/sakai,OpenCollabZA/sakai,conder/sakai,liubo404/sakai,willkara/sakai,kwedoff1/sakai,whumph/sakai,willkara/sakai,kingmook/sakai,willkara/sakai,kwedoff1/sakai,buckett/sakai-gitflow,kingmook/sakai,hackbuteer59/sakai,liubo404/sakai,udayg/sakai,duke-compsci290-spring2016/sakai,frasese/sakai,kingmook/sakai,ouit0408/sakai,hackbuteer59/sakai,buckett/sakai-gitflow,bkirschn/sakai,Fudan-University/
sakai,conder/sakai,lorenamgUMU/sakai,ktakacs/sakai,zqian/sakai,rodriguezdevera/sakai,kwedoff1/sakai,introp-software/sakai,bzhouduke123/sakai,whumph/sakai,buckett/sakai-gitflow,wfuedu/sakai,puramshetty/sakai,frasese/sakai,bkirschn/sakai,colczr/sakai,hackbuteer59/sakai,liubo404/sakai,duke-compsci290-spring2016/sakai,tl-its-umich-edu/sakai,joserabal/sakai,introp-software/sakai,bzhouduke123/sakai,ktakacs/sakai,ouit0408/sakai,liubo404/sakai,conder/sakai,hackbuteer59/sakai,Fudan-University/sakai,lorenamgUMU/sakai,joserabal/sakai,ouit0408/sakai,bkirschn/sakai,zqian/sakai,rodriguezdevera/sakai,pushyamig/sakai,rodriguezdevera/sakai,noondaysun/sakai,rodriguezdevera/sakai,wfuedu/sakai,duke-compsci290-spring2016/sakai,zqian/sakai,whumph/sakai,hackbuteer59/sakai,surya-janani/sakai,bkirschn/sakai,lorenamgUMU/sakai,kingmook/sakai,noondaysun/sakai,kingmook/sakai,rodriguezdevera/sakai,joserabal/sakai,noondaysun/sakai,frasese/sakai,wfuedu/sakai,ktakacs/sakai,kingmook/sakai,wfuedu/sakai,tl-its-umich-edu/sakai,clhedrick/sakai,udayg/sakai,introp-software/sakai,hackbuteer59/sakai,bkirschn/sakai,conder/sakai,willkara/sakai,lorenamgUMU/sakai,clhedrick/sakai,zqian/sakai,OpenCollabZA/sakai,duke-compsci290-spring2016/sakai,kwedoff1/sakai,bkirschn/sakai,lorenamgUMU/sakai,frasese/sakai,colczr/sakai,clhedrick/sakai,lorenamgUMU/sakai,kwedoff1/sakai,conder/sakai,lorenamgUMU/sakai,udayg/sakai,willkara/sakai,OpenCollabZA/sakai,introp-software/sakai,willkara/sakai,kwedoff1/sakai,whumph/sakai,buckett/sakai-gitflow,ktakacs/sakai,buckett/sakai-gitflow,ktakacs/sakai,OpenCollabZA/sakai,pushyamig/sakai,conder/sakai,bzhouduke123/sakai,Fudan-University/sakai,frasese/sakai,zqian/sakai,colczr/sakai,OpenCollabZA/sakai,buckett/sakai-gitflow,hackbuteer59/sakai,colczr/sakai,surya-janani/sakai,joserabal/sakai,pushyamig/sakai,duke-compsci290-spring2016/sakai,conder/sakai,udayg/sakai,joserabal/sakai,wfuedu/sakai,surya-janani/sakai,zqian/sakai,wfuedu/sakai,clhedrick/sakai,puramshetty/sakai,surya-janani/sakai,noondaysu
n/sakai,bkirschn/sakai,introp-software/sakai,clhedrick/sakai,tl-its-umich-edu/sakai,rodriguezdevera/sakai,introp-software/sakai
package org.sakaiproject.emailtemplateservice.tool.producers; import java.util.Collections; import java.util.Comparator; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.emailtemplateservice.model.EmailTemplate; import org.sakaiproject.emailtemplateservice.service.EmailTemplateService; import org.sakaiproject.emailtemplateservice.service.external.ExternalLogic; import org.sakaiproject.emailtemplateservice.tool.params.EmailTemplateViewParams; import uk.org.ponder.messageutil.TargettedMessage; import uk.org.ponder.messageutil.TargettedMessageList; import uk.org.ponder.rsf.components.UIBranchContainer; import uk.org.ponder.rsf.components.UIContainer; import uk.org.ponder.rsf.components.UIInternalLink; import uk.org.ponder.rsf.components.UIOutput; import uk.org.ponder.rsf.components.UIMessage; import uk.org.ponder.rsf.view.ComponentChecker; import uk.org.ponder.rsf.view.DefaultView; import uk.org.ponder.rsf.view.ViewComponentProducer; import uk.org.ponder.rsf.viewstate.ViewParameters; public class MainViewProducer implements ViewComponentProducer, DefaultView { public static final String VIEW_ID = "main"; public String getViewID() { // TODO Auto-generated method stub return VIEW_ID; } private static Log log = LogFactory.getLog(MainViewProducer.class); private EmailTemplateService emailTemplateService; public void setEmailTemplateService(EmailTemplateService ets) { emailTemplateService = ets; } private ExternalLogic externalLogic; public void setExternalLogic(ExternalLogic externalLogic) { this.externalLogic = externalLogic; } private TargettedMessageList messages; public void setMessages(TargettedMessageList messages) { this.messages = messages; } public void fillComponents(UIContainer tofill, ViewParameters viewparams, ComponentChecker checker) { //is this user admin? 
if (!externalLogic.isSuperUser()) { messages.addMessage(new TargettedMessage("tool.notAdmin", new Object[]{}, TargettedMessage.SEVERITY_ERROR)); return; } UIBranchContainer navIntra = UIBranchContainer.make(tofill, "navintra:"); UIInternalLink.make(navIntra,"actions-add", UIMessage.make("mainview.new") , new EmailTemplateViewParams(ModifyEmailProducer.VIEW_ID, null)); UIBranchContainer table = UIBranchContainer.make(tofill, "table:"); List<EmailTemplate> templates = emailTemplateService.getEmailTemplates(0, 0); Collections.sort(templates, new EmailTemplateComaparator()); for (int i =0; i < templates.size(); i++) { EmailTemplate template = templates.get(i); log.debug("got template: " + template.getKey()); UIBranchContainer row = UIBranchContainer.make(table, "template-row:", template.getId().toString()); UIOutput.make(row, "template-key", template.getKey()); String locale = template.getLocale(); if (locale == null ) locale = ""; UIOutput.make(row, "template-locale", locale); UIInternalLink.make(row,"template-edit" , UIMessage.make("mainview.edit"), new EmailTemplateViewParams(ModifyEmailProducer.VIEW_ID, template.getId().toString())); //UIInternalLink.make(row,"template-delete" , UIMessage.make("mainview.delete"), new EmailTemplateViewParams(ModifyEmailProducer.VIEW_ID, template.getId().toString())); } } private class EmailTemplateComaparator implements Comparator<EmailTemplate> { public int compare(EmailTemplate o1, EmailTemplate o2) { if (o1 == null && o2 != null) { return 1; } else if (o1 != null && o2 == null) { return -1; } String key1 = o1.getKey(); String key2 = o2.getKey(); String locale1 = o1.getLocale(); String locale2 = o2.getLocale(); int keyDiff = key1.compareTo(key2); if (keyDiff != 0) { return keyDiff; } //keys are equal compare the locale return locale1.compareTo(locale2); } } }
emailtemplateservice/tool/src/java/org/sakaiproject/emailtemplateservice/tool/producers/MainViewProducer.java
package org.sakaiproject.emailtemplateservice.tool.producers; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.emailtemplateservice.model.EmailTemplate; import org.sakaiproject.emailtemplateservice.service.EmailTemplateService; import org.sakaiproject.emailtemplateservice.service.external.ExternalLogic; import org.sakaiproject.emailtemplateservice.tool.params.EmailTemplateViewParams; import uk.org.ponder.messageutil.TargettedMessage; import uk.org.ponder.messageutil.TargettedMessageList; import uk.org.ponder.rsf.components.UIBranchContainer; import uk.org.ponder.rsf.components.UIContainer; import uk.org.ponder.rsf.components.UIInternalLink; import uk.org.ponder.rsf.components.UIOutput; import uk.org.ponder.rsf.components.UIMessage; import uk.org.ponder.rsf.view.ComponentChecker; import uk.org.ponder.rsf.view.DefaultView; import uk.org.ponder.rsf.view.ViewComponentProducer; import uk.org.ponder.rsf.viewstate.ViewParameters; public class MainViewProducer implements ViewComponentProducer, DefaultView { public static final String VIEW_ID = "main"; public String getViewID() { // TODO Auto-generated method stub return VIEW_ID; } private static Log log = LogFactory.getLog(MainViewProducer.class); private EmailTemplateService emailTemplateService; public void setEmailTemplateService(EmailTemplateService ets) { emailTemplateService = ets; } private ExternalLogic externalLogic; public void setExternalLogic(ExternalLogic externalLogic) { this.externalLogic = externalLogic; } private TargettedMessageList messages; public void setMessages(TargettedMessageList messages) { this.messages = messages; } public void fillComponents(UIContainer tofill, ViewParameters viewparams, ComponentChecker checker) { //is this user admin? 
if (!externalLogic.isSuperUser()) { messages.addMessage(new TargettedMessage("tool.notAdmin", new Object[]{}, TargettedMessage.SEVERITY_ERROR)); return; } UIBranchContainer navIntra = UIBranchContainer.make(tofill, "navintra:"); UIInternalLink.make(navIntra,"actions-add", UIMessage.make("mainview.new") , new EmailTemplateViewParams(ModifyEmailProducer.VIEW_ID, null)); UIBranchContainer table = UIBranchContainer.make(tofill, "table:"); List<EmailTemplate> templates = emailTemplateService.getEmailTemplates(0, 0); for (int i =0; i < templates.size(); i++) { EmailTemplate template = templates.get(i); log.debug("got template: " + template.getKey()); UIBranchContainer row = UIBranchContainer.make(table, "template-row:", template.getId().toString()); UIOutput.make(row, "template-key", template.getKey()); String locale = template.getLocale(); if (locale == null ) locale = ""; UIOutput.make(row, "template-locale", locale); UIInternalLink.make(row,"template-edit" , UIMessage.make("mainview.edit"), new EmailTemplateViewParams(ModifyEmailProducer.VIEW_ID, template.getId().toString())); //UIInternalLink.make(row,"template-delete" , UIMessage.make("mainview.delete"), new EmailTemplateViewParams(ModifyEmailProducer.VIEW_ID, template.getId().toString())); } } }
SAK-23492 sort the list of emailTemplates git-svn-id: 33bfb07abb5e6b25fc84e5a4806e76c6fa47d6b5@122838 66ffb92e-73f9-0310-93c1-f5514f145a0a
emailtemplateservice/tool/src/java/org/sakaiproject/emailtemplateservice/tool/producers/MainViewProducer.java
SAK-23492 sort the list of emailTemplates
<ide><path>mailtemplateservice/tool/src/java/org/sakaiproject/emailtemplateservice/tool/producers/MainViewProducer.java <ide> package org.sakaiproject.emailtemplateservice.tool.producers; <ide> <add>import java.util.Collections; <add>import java.util.Comparator; <ide> import java.util.List; <ide> <ide> import org.apache.commons.logging.Log; <ide> UIBranchContainer table = UIBranchContainer.make(tofill, "table:"); <ide> <ide> List<EmailTemplate> templates = emailTemplateService.getEmailTemplates(0, 0); <add> Collections.sort(templates, new EmailTemplateComaparator()); <ide> for (int i =0; i < templates.size(); i++) { <ide> EmailTemplate template = templates.get(i); <ide> log.debug("got template: " + template.getKey()); <ide> //UIInternalLink.make(row,"template-delete" , UIMessage.make("mainview.delete"), new EmailTemplateViewParams(ModifyEmailProducer.VIEW_ID, template.getId().toString())); <ide> } <ide> } <add> <add> private class EmailTemplateComaparator implements Comparator<EmailTemplate> { <add> <add> <add> public int compare(EmailTemplate o1, EmailTemplate o2) { <add> if (o1 == null && o2 != null) { <add> return 1; <add> } else if (o1 != null && o2 == null) { <add> return -1; <add> } <add> <add> String key1 = o1.getKey(); <add> String key2 = o2.getKey(); <add> String locale1 = o1.getLocale(); <add> String locale2 = o2.getLocale(); <add> <add> int keyDiff = key1.compareTo(key2); <add> if (keyDiff != 0) { <add> return keyDiff; <add> } <add> <add> //keys are equal compare the locale <add> return locale1.compareTo(locale2); <add> <add> } <add> <add> } <ide> <ide> }
JavaScript
mit
ddee7d718dd57ba0ab8a1e489d3eca45091b1523
0
compute-io/cast-arrays,dstructs/cast-arrays
'use strict'; // MODULES // var arrayLike = require( 'validate.io-array-like' ), typeName = require( 'type-name' ); // VARIABLES // var DTYPES = require( 'compute-array-dtypes/dtypes' ), CTORS = require( 'compute-array-constructors/ctors' ); // CAST // /** * FUNCTION: cast( x, type ) * Casts an input array or array-like object to a specified type. * * @private * @param {Object|Array|Int8Array|Uint8Array|Uint8ClampedArray|Int16Array|Uint16Array|Int32Array|Uint32Array|Float32Array|Float64Array} x - value to cast * @param {String|Array|Int8Array|Uint8Array|Uint8ClampedArray|Int16Array|Uint16Array|Int32Array|Uint32Array|Float32Array|Float64Array} type - type to which to cast or a value from which the desired type should be inferred * @returns {Array|Int8Array|Uint8Array|Uint8ClampedArray|Int16Array|Uint16Array|Int32Array|Uint32Array|Float32Array|Float64Array} casted value */ function cast( x, type ) { /* jshint newcap:false */ var ctor, len, d, i; if ( !arrayLike( x ) ) { throw new TypeError( 'cast()::invalid input argument. First argument must be an array-like object. Value: `' + x + '`.' ); } if ( typeof type === 'string' ) { ctor = CTORS[ type ]; } else { ctor = CTORS[ DTYPES[ typeName[ type ] ] ]; } if ( ctor === void 0 ) { throw new Error( 'cast()::invalid input argument. Unrecognized/unsupported type to which to cast. Value: `' + type + '`.' ); } len = x.length; d = new ctor( len ); for ( i = 0; i < len; i++ ) { d[ i ] = x[ i ]; } return d; } // end FUNCTION cast() // EXPORTS // module.exports = cast;
lib/index.js
'use strict'; // MODULES // var arrayLike = require( 'validate.io-array-like' ), typeName = require( 'type-name' ); // VARIABLES // var DTYPES = require( 'compute-array-dtypes/dtypes' ), CTORS = require( 'compute-array-constructors/ctors' ); // CAST // /** * FUNCTION: cast( x, type ) * Casts an input array or array-like object to a specified type. * * @private * @param {Object|Array|Int8Array|Uint8Array|Uint8ClampedArray|Int16Array|Uint16Array|Int32Array|Uint32Array|Float32Array|Float64Array} x - value to cast * @param {String|Array|Int8Array|Uint8Array|Uint8ClampedArray|Int16Array|Uint16Array|Int32Array|Uint32Array|Float32Array|Float64Array} type - type to which to cast or a value from which the desired type should be inferred * @returns {Array|Int8Array|Uint8Array|Uint8ClampedArray|Int16Array|Uint16Array|Int32Array|Uint32Array|Float32Array|Float64Array} casted value */ function cast( x, type ) { var ctor, len, d, i; if ( !arrayLike( x ) ) { throw new TypeError( 'cast()::invalid input argument. First argument must be an array-like object. Value: `' + x + '`.' ); } if ( typeof type === 'string' ) { ctor = CTORS[ type ]; } else { ctor = CTORS[ DTYPES[ typeName[ type ] ] ]; } if ( ctor === void 0 ) { throw new Error( 'cast()::invalid input argument. Unrecognized/unsupported type to which to cast. Value: `' + type + '`.' ); } /* jshint newcap:false */ d = new ctor( len ); for ( i = 0; i < len; i++ ) { d[ i ] = x[ i ]; } return d; } // end FUNCTION cast() // EXPORTS // module.exports = cast;
[FIX] length assignment.
lib/index.js
[FIX] length assignment.
<ide><path>ib/index.js <ide> * @returns {Array|Int8Array|Uint8Array|Uint8ClampedArray|Int16Array|Uint16Array|Int32Array|Uint32Array|Float32Array|Float64Array} casted value <ide> */ <ide> function cast( x, type ) { <add> /* jshint newcap:false */ <ide> var ctor, <ide> len, <ide> d, <ide> if ( ctor === void 0 ) { <ide> throw new Error( 'cast()::invalid input argument. Unrecognized/unsupported type to which to cast. Value: `' + type + '`.' ); <ide> } <del> /* jshint newcap:false */ <add> len = x.length; <ide> d = new ctor( len ); <ide> for ( i = 0; i < len; i++ ) { <ide> d[ i ] = x[ i ];
Java
apache-2.0
4230151be6385a6f34806587e0429d5129817d3e
0
alonsod86/orientdb,allanmoso/orientdb,wyzssw/orientdb,mmacfadden/orientdb,mbhulin/orientdb,rprabhat/orientdb,sanyaade-g2g-repos/orientdb,alonsod86/orientdb,mbhulin/orientdb,jdillon/orientdb,wyzssw/orientdb,intfrr/orientdb,sanyaade-g2g-repos/orientdb,intfrr/orientdb,wyzssw/orientdb,cstamas/orientdb,giastfader/orientdb,mbhulin/orientdb,jdillon/orientdb,mbhulin/orientdb,allanmoso/orientdb,joansmith/orientdb,wouterv/orientdb,tempbottle/orientdb,wyzssw/orientdb,allanmoso/orientdb,redox/OrientDB,wouterv/orientdb,orientechnologies/orientdb,mmacfadden/orientdb,rprabhat/orientdb,cstamas/orientdb,joansmith/orientdb,cstamas/orientdb,joansmith/orientdb,rprabhat/orientdb,wouterv/orientdb,intfrr/orientdb,rprabhat/orientdb,tempbottle/orientdb,redox/OrientDB,alonsod86/orientdb,redox/OrientDB,allanmoso/orientdb,giastfader/orientdb,orientechnologies/orientdb,redox/OrientDB,cstamas/orientdb,intfrr/orientdb,tempbottle/orientdb,mmacfadden/orientdb,jdillon/orientdb,giastfader/orientdb,joansmith/orientdb,wouterv/orientdb,tempbottle/orientdb,orientechnologies/orientdb,alonsod86/orientdb,giastfader/orientdb,mmacfadden/orientdb,orientechnologies/orientdb,sanyaade-g2g-repos/orientdb,sanyaade-g2g-repos/orientdb
/* * Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.core.type.tree; import java.io.IOException; import com.orientechnologies.common.collection.OMVRBTreeEntry; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.exception.OSerializationException; import com.orientechnologies.orient.core.id.ORID; /** * Persistent TreeMap implementation that use a ODatabase instance to handle the entries. This class can be used also from the user. * It's transaction aware. * * @author Luca Garulli * * @param <K> * Key type * @param <V> * Value type */ @SuppressWarnings("serial") public class OMVRBTreeEntryDatabase<K, V> extends OMVRBTreeEntryPersistent<K, V> { /** * Called on event of splitting an entry. * * @param iParent * Parent node * @param iPosition * Current position * @param iLeft */ public OMVRBTreeEntryDatabase(OMVRBTreeEntry<K, V> iParent, int iPosition) { super(iParent, iPosition); } /** * Called upon unmarshalling. 
* * @param iTree * Tree which belong * @param iParent * Parent node if any * @param iRecordId * Record to unmarshall */ public OMVRBTreeEntryDatabase(OMVRBTreeDatabase<K, V> iTree, OMVRBTreeEntryDatabase<K, V> iParent, ORID iRecordId) throws IOException { super(iTree, iParent, iRecordId); load(); } public OMVRBTreeEntryDatabase(OMVRBTreeDatabase<K, V> iTree, K key, V value, OMVRBTreeEntryDatabase<K, V> iParent) { super(iTree, key, value, iParent); } @Override public OMVRBTreeEntryDatabase<K, V> load() throws IOException { try { record.setDatabase(ODatabaseRecordThreadLocal.INSTANCE.get()); record.reload(); } catch (Exception e) { // ERROR, MAYBE THE RECORD WASN'T CREATED OLogManager.instance().warn(this, "Error on loading index node record %s", e, record.getIdentity()); } record.recycle(this); fromStream(record.toStream()); return this; } @Override public OMVRBTreeEntryDatabase<K, V> save() throws OSerializationException { if (!record.isDirty()) return this; super.save(); if (parent != null) if (!parent.record.getIdentity().equals(parentRid)) OLogManager.instance().error(this, "[save]: Tree node %s has parentRid '%s' different by the rid of the assigned parent node: %s", record.getIdentity(), parentRid, parent.record.getIdentity()); checkEntryStructure(); if (pTree.cache.get(record.getIdentity()) != this) // UPDATE THE CACHE pTree.cache.put(record.getIdentity(), this); return this; } /** * Delete all the nodes recursively. IF they are not loaded in memory, load all the tree. 
* * @throws IOException */ @Override public OMVRBTreeEntryDatabase<K, V> delete() throws IOException { // EARLY LOAD LEFT AND DELETE IT RECURSIVELY if (getLeft() != null) ((OMVRBTreeEntryPersistent<K, V>) getLeft()).delete(); leftRid = null; // EARLY LOAD RIGHT AND DELETE IT RECURSIVELY if (getRight() != null) ((OMVRBTreeEntryPersistent<K, V>) getRight()).delete(); rightRid = null; // DELETE MYSELF record.setDatabase(ODatabaseRecordThreadLocal.INSTANCE.get()); record.delete(); // FORCE REMOVING OF K/V AND SEIALIZED K/V AS WELL keys = null; values = null; serializedKeys = null; serializedValues = null; super.delete(); return this; } @Override protected Object keyFromStream(final int iIndex) throws IOException { return pTree.keySerializer.fromStream(((OMVRBTreeDatabase<K, V>) pTree).getDatabase(), inStream.getAsByteArray(serializedKeys[iIndex])); } @Override protected Object valueFromStream(final int iIndex) throws IOException { return pTree.valueSerializer.fromStream(((OMVRBTreeDatabase<K, V>) pTree).getDatabase(), inStream.getAsByteArray(serializedValues[iIndex])); } }
core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryDatabase.java
/* * Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.core.type.tree; import java.io.IOException; import com.orientechnologies.common.collection.OMVRBTreeEntry; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.exception.OSerializationException; import com.orientechnologies.orient.core.id.ORID; /** * Persistent TreeMap implementation that use a ODatabase instance to handle the entries. This class can be used also from the user. * It's transaction aware. * * @author Luca Garulli * * @param <K> * Key type * @param <V> * Value type */ @SuppressWarnings("serial") public class OMVRBTreeEntryDatabase<K, V> extends OMVRBTreeEntryPersistent<K, V> { /** * Called on event of splitting an entry. * * @param iParent * Parent node * @param iPosition * Current position * @param iLeft */ public OMVRBTreeEntryDatabase(OMVRBTreeEntry<K, V> iParent, int iPosition) { super(iParent, iPosition); } /** * Called upon unmarshalling. 
* * @param iTree * Tree which belong * @param iParent * Parent node if any * @param iRecordId * Record to unmarshall */ public OMVRBTreeEntryDatabase(OMVRBTreeDatabase<K, V> iTree, OMVRBTreeEntryDatabase<K, V> iParent, ORID iRecordId) throws IOException { super(iTree, iParent, iRecordId); load(); } public OMVRBTreeEntryDatabase(OMVRBTreeDatabase<K, V> iTree, K key, V value, OMVRBTreeEntryDatabase<K, V> iParent) { super(iTree, key, value, iParent); } @Override public OMVRBTreeEntryDatabase<K, V> load() throws IOException { try { record.setDatabase(ODatabaseRecordThreadLocal.INSTANCE.get()); record.reload(); } catch (Exception e) { // ERROR, MAYBE THE RECORD WASN'T CREATED } record.recycle(this); fromStream(record.toStream()); return this; } @Override public OMVRBTreeEntryDatabase<K, V> save() throws OSerializationException { if (!record.isDirty()) return this; super.save(); if (parent != null) if (!parent.record.getIdentity().equals(parentRid)) OLogManager.instance().error(this, "[save]: Tree node %s has parentRid '%s' different by the rid of the assigned parent node: %s", record.getIdentity(), parentRid, parent.record.getIdentity()); checkEntryStructure(); if (pTree.cache.get(record.getIdentity()) != this) // UPDATE THE CACHE pTree.cache.put(record.getIdentity(), this); return this; } /** * Delete all the nodes recursively. IF they are not loaded in memory, load all the tree. 
* * @throws IOException */ @Override public OMVRBTreeEntryDatabase<K, V> delete() throws IOException { // EARLY LOAD LEFT AND DELETE IT RECURSIVELY if (getLeft() != null) ((OMVRBTreeEntryPersistent<K, V>) getLeft()).delete(); leftRid = null; // EARLY LOAD RIGHT AND DELETE IT RECURSIVELY if (getRight() != null) ((OMVRBTreeEntryPersistent<K, V>) getRight()).delete(); rightRid = null; // DELETE MYSELF record.setDatabase(ODatabaseRecordThreadLocal.INSTANCE.get()); record.delete(); // FORCE REMOVING OF K/V AND SEIALIZED K/V AS WELL keys = null; values = null; serializedKeys = null; serializedValues = null; super.delete(); return this; } @Override protected Object keyFromStream(final int iIndex) throws IOException { return pTree.keySerializer.fromStream(((OMVRBTreeDatabase<K, V>) pTree).getDatabase(), inStream.getAsByteArray(serializedKeys[iIndex])); } @Override protected Object valueFromStream(final int iIndex) throws IOException { return pTree.valueSerializer.fromStream(((OMVRBTreeDatabase<K, V>) pTree).getDatabase(), inStream.getAsByteArray(serializedValues[iIndex])); } }
Index: added log error in case the tree nodes' record can't be loaded
core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryDatabase.java
Index: added log error in case the tree nodes' record can't be loaded
<ide><path>ore/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryDatabase.java <ide> record.reload(); <ide> } catch (Exception e) { <ide> // ERROR, MAYBE THE RECORD WASN'T CREATED <add> OLogManager.instance().warn(this, "Error on loading index node record %s", e, record.getIdentity()); <ide> } <ide> record.recycle(this); <ide> fromStream(record.toStream());
Java
apache-2.0
312ab339abb23f0204aa98474d4c0817d5d44a0c
0
lucaswerkmeister/ceylon.language,jvasileff/ceylon.language,ceylon/ceylon.language,unratito/ceylon.language,ceylon/ceylon.language,jvasileff/ceylon.language,unratito/ceylon.language,lucaswerkmeister/ceylon.language
package com.redhat.ceylon.compiler.java.runtime.metamodel; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationHandler; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import ceylon.language.Anything; import ceylon.language.ArraySequence; import ceylon.language.Callable; import ceylon.language.Iterator; import ceylon.language.Null; import ceylon.language.SequenceBuilder; import ceylon.language.Sequential; import ceylon.language.empty_; import ceylon.language.finished_; import ceylon.language.Annotated; import ceylon.language.meta.model.ClassOrInterface; import ceylon.language.meta.model.IncompatibleTypeException; import ceylon.language.meta.model.InvocationException; import ceylon.language.ConstrainedAnnotation; import ceylon.language.meta.model.TypeApplicationException; import ceylon.language.meta.declaration.AnnotatedDeclaration; import ceylon.language.meta.declaration.Module; import ceylon.language.meta.declaration.Package; import ceylon.language.meta.declaration.NestableDeclaration; import com.redhat.ceylon.cmr.api.ArtifactResult; import com.redhat.ceylon.cmr.api.Logger; import com.redhat.ceylon.cmr.api.RepositoryManager; import com.redhat.ceylon.cmr.api.RepositoryManagerBuilder; import com.redhat.ceylon.compiler.java.Util; import com.redhat.ceylon.compiler.java.codegen.Naming; import com.redhat.ceylon.compiler.java.language.BooleanArray; import com.redhat.ceylon.compiler.java.language.ByteArray; import com.redhat.ceylon.compiler.java.language.CharArray; import com.redhat.ceylon.compiler.java.language.DoubleArray; import com.redhat.ceylon.compiler.java.language.FloatArray; import com.redhat.ceylon.compiler.java.language.IntArray; import com.redhat.ceylon.compiler.java.language.InternalMap; import com.redhat.ceylon.compiler.java.language.LongArray; import com.redhat.ceylon.compiler.java.language.ObjectArray; 
import com.redhat.ceylon.compiler.java.language.ShortArray; import com.redhat.ceylon.compiler.java.metadata.Ceylon; import com.redhat.ceylon.compiler.java.metadata.Variance; import com.redhat.ceylon.compiler.java.runtime.model.ReifiedType; import com.redhat.ceylon.compiler.java.runtime.model.RuntimeModuleManager; import com.redhat.ceylon.compiler.java.runtime.model.TypeDescriptor; import com.redhat.ceylon.compiler.loader.ModelLoader.DeclarationType; import com.redhat.ceylon.compiler.loader.impl.reflect.mirror.ReflectionClass; import com.redhat.ceylon.compiler.loader.impl.reflect.mirror.ReflectionMethod; import com.redhat.ceylon.compiler.loader.model.JavaMethod; import com.redhat.ceylon.compiler.loader.model.LazyClass; import com.redhat.ceylon.compiler.loader.model.LazyClassAlias; import com.redhat.ceylon.compiler.loader.model.LazyElement; import com.redhat.ceylon.compiler.loader.model.LazyInterface; import com.redhat.ceylon.compiler.loader.model.LazyMethod; import com.redhat.ceylon.compiler.loader.model.LazyPackage; import com.redhat.ceylon.compiler.loader.model.LazyValue; import com.redhat.ceylon.compiler.typechecker.analyzer.ExpressionVisitor; import com.redhat.ceylon.compiler.typechecker.context.Context; import com.redhat.ceylon.compiler.typechecker.io.VFS; import com.redhat.ceylon.compiler.typechecker.model.Declaration; import com.redhat.ceylon.compiler.typechecker.model.Functional; import com.redhat.ceylon.compiler.typechecker.model.Method; import com.redhat.ceylon.compiler.typechecker.model.NothingType; import com.redhat.ceylon.compiler.typechecker.model.Parameter; import com.redhat.ceylon.compiler.typechecker.model.ProducedReference; import com.redhat.ceylon.compiler.typechecker.model.ProducedType; import com.redhat.ceylon.compiler.typechecker.model.Scope; import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration; import com.redhat.ceylon.compiler.typechecker.model.TypeParameter; public class Metamodel { private static RuntimeModuleManager 
moduleManager;
// FIXME: this will need better thinking in terms of memory usage
// Caches mapping typechecker model objects to their runtime metamodel
// wrappers; guarded by getLock(), cleared by resetModuleManager().
private static Map<com.redhat.ceylon.compiler.typechecker.model.Declaration, com.redhat.ceylon.compiler.java.runtime.metamodel.FreeNestableDeclaration> typeCheckModelToRuntimeModel = new HashMap<com.redhat.ceylon.compiler.typechecker.model.Declaration, com.redhat.ceylon.compiler.java.runtime.metamodel.FreeNestableDeclaration>();
private static Map<com.redhat.ceylon.compiler.typechecker.model.Package, com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage> typeCheckPackagesToRuntimeModel = new HashMap<com.redhat.ceylon.compiler.typechecker.model.Package, com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage>();
private static Map<com.redhat.ceylon.compiler.typechecker.model.Module, com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule> typeCheckModulesToRuntimeModel = new HashMap<com.redhat.ceylon.compiler.typechecker.model.Module, com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule>();

static{
    resetModuleManager();
}

/** Loads the given module artifact into the shared runtime module manager. */
public static void loadModule(String name, String version, ArtifactResult result, ClassLoader classLoader){
    moduleManager.loadModule(name, version, result, classLoader);
}

/**
 * Rebuilds the runtime module manager from scratch (fresh repository,
 * context and core modules) and clears every metamodel cache.
 * Logger output goes to stderr.
 */
public static void resetModuleManager() {
    RepositoryManagerBuilder builder = new RepositoryManagerBuilder(new Logger(){
        @Override
        public void error(String str) {
            System.err.println("ERROR: "+str);
        }

        @Override
        public void warning(String str) {
            System.err.println("WARN: "+str);
        }

        @Override
        public void info(String str) {
            System.err.println("INFO: "+str);
        }

        @Override
        public void debug(String str) {
            System.err.println("DEBUG: "+str);
        }
    }, false);
    RepositoryManager repoManager = builder.buildRepository();
    VFS vfs = new VFS();
    Context context = new Context(repoManager, vfs);
    moduleManager = new RuntimeModuleManager(context);
    moduleManager.initCoreModules();
    moduleManager.prepareForTypeChecking();
    typeCheckModelToRuntimeModel.clear();
    typeCheckModulesToRuntimeModel.clear();
    typeCheckPackagesToRuntimeModel.clear();
}

/** The model loader doubles as the lock object guarding the metamodel caches. */
public static Object getLock(){
    return moduleManager.getModelLoader();
}

/**
 * Returns the runtime type descriptor of an instance: Null for null,
 * the reified descriptor when the instance carries one, otherwise a
 * descriptor derived from its Java class.
 */
public static TypeDescriptor getTypeDescriptor(Object instance) {
    if(instance == null)
        return Null.$TypeDescriptor$;
    else if(instance instanceof ReifiedType)
        return ((ReifiedType) instance).$getType$();
    else
        return getJavaTypeDescriptor(instance.getClass());
}

// Maps Java primitive-array classes to the corresponding Ceylon array
// type descriptors; object arrays recurse on the component type.
private static TypeDescriptor getJavaArrayTypeDescriptor(Class<?> klass) {
    if(klass == byte[].class)
        return ByteArray.$TypeDescriptor$;
    if(klass == short[].class)
        return ShortArray.$TypeDescriptor$;
    if(klass == int[].class)
        return IntArray.$TypeDescriptor$;
    if(klass == long[].class)
        return LongArray.$TypeDescriptor$;
    if(klass == float[].class)
        return FloatArray.$TypeDescriptor$;
    if(klass == double[].class)
        return DoubleArray.$TypeDescriptor$;
    if(klass == boolean[].class)
        return BooleanArray.$TypeDescriptor$;
    if(klass == char[].class)
        return CharArray.$TypeDescriptor$;
    TypeDescriptor componentType = getJavaTypeDescriptor(klass.getComponentType());
    return TypeDescriptor.klass(ObjectArray.class, componentType);
}

// Descriptor for a plain Java class, mapping java.lang.Object to
// ceylon.language.Object.
private static TypeDescriptor getJavaTypeDescriptor(Class<?> klass) {
    if(klass.isArray())
        return getJavaArrayTypeDescriptor(klass);
    // make sure java.lang.Object doesn't leak in the ceylon metamodel
    // TODO: what about Throwable/j.l.Exception/RuntimeException?
    if(klass == Object.class)
        return ceylon.language.Object.$TypeDescriptor$;
    return TypeDescriptor.klass(klass);
}

/** True if the runtime type of {@code o} is a subtype of the given type descriptor. */
public static boolean isReified(java.lang.Object o, TypeDescriptor type){
    TypeDescriptor instanceType = getTypeDescriptor(o);
    if(instanceType == null)
        return false;
    return instanceType.toProducedType(moduleManager).isSubtypeOf(type.toProducedType(moduleManager));
}

/** Typechecker model type of an instance; fails for unsupported Java types. */
public static ProducedType getProducedType(Object instance) {
    TypeDescriptor instanceType = getTypeDescriptor(instance);
    if(instanceType == null)
        throw new RuntimeException("Metamodel not yet supported for Java types");
    return getProducedType(instanceType);
}

public static ProducedType getProducedType(TypeDescriptor reifiedType) {
    return reifiedType.toProducedType(moduleManager);
}

/** Applied (closed) metamodel type for a runtime type descriptor. */
public static ceylon.language.meta.model.Type<?> getAppliedMetamodel(TypeDescriptor typeDescriptor) {
    if(typeDescriptor == null)
        throw new RuntimeException("Metamodel not yet supported for Java types");
    ProducedType pt = typeDescriptor.toProducedType(moduleManager);
    return getAppliedMetamodel(pt);
}

/**
 * Returns the cached open-declaration metamodel wrapper for the given
 * typechecker declaration, creating and caching it on first request.
 * Supported kinds: Class, Interface, TypeAlias, Method, Value.
 */
public static com.redhat.ceylon.compiler.java.runtime.metamodel.FreeNestableDeclaration getOrCreateMetamodel(com.redhat.ceylon.compiler.typechecker.model.Declaration declaration){
    synchronized(getLock()){
        com.redhat.ceylon.compiler.java.runtime.metamodel.FreeNestableDeclaration ret = typeCheckModelToRuntimeModel.get(declaration);
        if(ret == null){
            if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Class){
                com.redhat.ceylon.compiler.typechecker.model.Class klass = (com.redhat.ceylon.compiler.typechecker.model.Class) declaration;
                ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeClass(klass);
            }else if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Interface){
                com.redhat.ceylon.compiler.typechecker.model.Interface interf = (com.redhat.ceylon.compiler.typechecker.model.Interface)declaration;
                ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeInterface(interf);
            }else if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.TypeAlias){
                com.redhat.ceylon.compiler.typechecker.model.TypeAlias alias = (com.redhat.ceylon.compiler.typechecker.model.TypeAlias)declaration;
                ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeAliasDeclaration(alias);
            }else if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Method){
                com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration method = (com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration)declaration;
                ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeFunction(method);
            }else if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Value){
                com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration value = (com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration)declaration;
                ret = new FreeValue(value);
            }else{
                throw new RuntimeException("Declaration type not supported yet: "+declaration);
            }
            typeCheckModelToRuntimeModel.put(declaration, ret);
        }
        return ret;
    }
}

/** True if the declaration itself, or an enclosing class/interface, has type parameters. */
public static boolean hasTypeParameters(com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration model) {
    if(model instanceof com.redhat.ceylon.compiler.typechecker.model.Generic)
        return hasTypeParameters((com.redhat.ceylon.compiler.typechecker.model.Generic)model);
    if(model.getContainer() instanceof com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface)
        return hasTypeParameters((com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface)model.getContainer());
    return false;
}

/** Recursive check: own type parameters, else those of a containing class/interface. */
public static boolean hasTypeParameters(com.redhat.ceylon.compiler.typechecker.model.Generic model) {
    if(!model.getTypeParameters().isEmpty())
        return true;
    Object container = ((com.redhat.ceylon.compiler.typechecker.model.Declaration)model).getContainer();
    if(container instanceof com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface)
        return hasTypeParameters((com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface) container);
    return false;
}

/** Cached package metamodel wrapper, created on first request. */
public static com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage getOrCreateMetamodel(com.redhat.ceylon.compiler.typechecker.model.Package declaration){
    synchronized(getLock()){
        com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage ret = typeCheckPackagesToRuntimeModel.get(declaration);
        if(ret == null){
            ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage(declaration);
            typeCheckPackagesToRuntimeModel.put(declaration, ret);
        }
        return ret;
    }
}

/** Cached module metamodel wrapper, created on first request. */
public static com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule getOrCreateMetamodel(com.redhat.ceylon.compiler.typechecker.model.Module declaration){
    synchronized(getLock()){
        com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule ret = typeCheckModulesToRuntimeModel.get(declaration);
        if(ret == null){
            ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule(declaration);
            typeCheckModulesToRuntimeModel.put(declaration, ret);
        }
        return ret;
    }
}

/**
 * Maps a typechecker ProducedType to its open-type metamodel wrapper,
 * dispatching on the kind of the type's declaration.
 */
public static ceylon.language.meta.declaration.OpenType getMetamodel(ProducedType pt) {
    TypeDeclaration declaration = pt.getDeclaration();
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Class){
        return new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeClassType(pt);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Interface){
        return new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeInterfaceType(pt);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.TypeParameter){
        com.redhat.ceylon.compiler.typechecker.model.TypeParameter tp = (com.redhat.ceylon.compiler.typechecker.model.TypeParameter) declaration;
        return new FreeTypeParameterType(tp);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.UnionType){
        return new FreeUnionType((com.redhat.ceylon.compiler.typechecker.model.UnionType)declaration);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.IntersectionType){
        return new
FreeIntersectionType((com.redhat.ceylon.compiler.typechecker.model.IntersectionType)declaration);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.NothingType){
        return ceylon.language.meta.declaration.nothingType_.get_();
    }
    throw new RuntimeException("Declaration type not supported yet: "+declaration);
}

/** Wraps each ProducedType as an open-type metamodel and returns them as a Sequential. */
@SuppressWarnings({ "unchecked", "rawtypes" })
public static Sequential<? extends ceylon.language.meta.declaration.OpenType> getMetamodelSequential(List<ProducedType> types) {
    if(types.isEmpty())
        return (Sequential<? extends ceylon.language.meta.declaration.OpenType>)(Sequential)empty_.get_();
    ceylon.language.meta.declaration.OpenType[] ret = new ceylon.language.meta.declaration.OpenType[types.size()];
    int i=0;
    for(ProducedType pt : types){
        ret[i++] = Metamodel.getMetamodel(pt);
    }
    return Util.sequentialInstance(ceylon.language.meta.declaration.OpenType.$TypeDescriptor$, ret);
}

/** Wraps each ProducedType as an applied-type metamodel and returns them as a Sequential. */
@SuppressWarnings({ "unchecked", "rawtypes" })
public static Sequential<? extends ceylon.language.meta.model.Type<? extends Object>> getAppliedMetamodelSequential(List<ProducedType> types) {
    if(types.isEmpty())
        return (Sequential<? extends ceylon.language.meta.model.Type<? extends Object>>)(Sequential)empty_.get_();
    ceylon.language.meta.model.Type<?>[] ret = new ceylon.language.meta.model.Type[types.size()];
    int i=0;
    for(ProducedType pt : types){
        ret[i++] = Metamodel.getAppliedMetamodel(pt);
    }
    return Util.sequentialInstance(TypeDescriptor.klass(ceylon.language.meta.model.Type.class, Anything.$TypeDescriptor$), ret);
}

/**
 * Maps a typechecker ProducedType to its applied (closed) metamodel,
 * dispatching on the kind of the type's declaration. Toplevel classes and
 * interfaces get Applied* wrappers; nested ones get AppliedMember* wrappers
 * that also carry the reified qualifying type.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static <T> ceylon.language.meta.model.Type<T> getAppliedMetamodel(ProducedType pt) {
    TypeDeclaration declaration = pt.getDeclaration();
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Class){
        // anonymous classes don't have parameter lists
        TypeDescriptor reifiedArguments;
        if(!declaration.isAnonymous() && !isLocalType((com.redhat.ceylon.compiler.typechecker.model.Class)declaration))
            reifiedArguments = Metamodel.getTypeDescriptorForArguments(declaration.getUnit(), (Functional)declaration, pt);
        else
            reifiedArguments = TypeDescriptor.NothingType;
        TypeDescriptor reifiedType = getTypeDescriptorForProducedType(pt);
        if(declaration.isToplevel())
            return new com.redhat.ceylon.compiler.java.runtime.metamodel.AppliedClass(reifiedType, reifiedArguments, pt, null, null);
        TypeDescriptor reifiedContainer = getTypeDescriptorForProducedType(pt.getQualifyingType());
        return new com.redhat.ceylon.compiler.java.runtime.metamodel.AppliedMemberClass(reifiedContainer, reifiedType, reifiedArguments, pt);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Interface){
        TypeDescriptor reifiedType = getTypeDescriptorForProducedType(pt);
        if(declaration.isToplevel())
            return new com.redhat.ceylon.compiler.java.runtime.metamodel.AppliedInterface<T>(reifiedType, pt, null, null);
        TypeDescriptor reifiedContainer = getTypeDescriptorForProducedType(pt.getQualifyingType());
        return new com.redhat.ceylon.compiler.java.runtime.metamodel.AppliedMemberInterface(reifiedContainer, reifiedType, pt);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.UnionType){
        TypeDescriptor reifiedType = getTypeDescriptorForProducedType(pt);
        return new AppliedUnionType<T>(reifiedType, (com.redhat.ceylon.compiler.typechecker.model.UnionType)declaration);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.IntersectionType){
        TypeDescriptor reifiedType = getTypeDescriptorForProducedType(pt);
        return new AppliedIntersectionType<T>(reifiedType, (com.redhat.ceylon.compiler.typechecker.model.IntersectionType)declaration);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.NothingType){
        return (ceylon.language.meta.model.Type<T>)ceylon.language.meta.model.nothingType_.get_();
    }
    throw new RuntimeException("Declaration type not supported yet: "+declaration);
}

/** Java class holding a module's compiled metadata (the generated {@code module_} class). */
public static java.lang.Class<?> getJavaClass(com.redhat.ceylon.compiler.typechecker.model.Module module) {
    String className = module.getNameAsString() + ".module_";
    ReflectionClass classMirror = (ReflectionClass)moduleManager.getModelLoader().lookupClassMirror(module, className);
    return classMirror.klass;
}

/** Java class holding a package's compiled metadata ({@code package_}), or null if absent. */
public static java.lang.Class<?> getJavaClass(com.redhat.ceylon.compiler.typechecker.model.Package pkg) {
    String className = ((LazyPackage) pkg).getNameAsString()+ ".package_";
    ReflectionClass classMirror = (ReflectionClass)moduleManager.getModelLoader().lookupClassMirror(pkg.getModule(), className);
    return classMirror != null ? classMirror.klass : null;
}

/**
 * Java class backing a lazy typechecker declaration, walking up to the
 * containing declaration's class when the declaration has no mirror of
 * its own.
 */
public static java.lang.Class<?> getJavaClass(com.redhat.ceylon.compiler.typechecker.model.Declaration declaration) {
    if(declaration instanceof LazyClass){
        ReflectionClass classMirror = (ReflectionClass) ((LazyClass) declaration).classMirror;
        return classMirror.klass;
    }
    if(declaration instanceof LazyInterface){
        ReflectionClass classMirror = (ReflectionClass) ((LazyInterface) declaration).classMirror;
        return classMirror.klass;
    }
    if(declaration instanceof LazyMethod){
        ReflectionClass classMirror = (ReflectionClass) ((LazyMethod) declaration).classMirror;
        return classMirror.klass;
    }
    if(declaration instanceof LazyValue){
        ReflectionClass classMirror = (ReflectionClass) ((LazyValue) declaration).classMirror;
        return classMirror.klass;
    }
    if (declaration instanceof LazyClassAlias) {
        ReflectionClass classMirror = (ReflectionClass) ((LazyClassAlias) declaration).classMirror;
        return classMirror.klass;
    }
    if(declaration.getContainer() instanceof com.redhat.ceylon.compiler.typechecker.model.Declaration){
        return getJavaClass((com.redhat.ceylon.compiler.typechecker.model.Declaration)declaration.getContainer());
    }
    throw new RuntimeException("Unsupported declaration type: " + declaration);
}

/** java.lang.reflect.Method backing a typechecker method declaration. */
public static java.lang.reflect.Method getJavaMethod(com.redhat.ceylon.compiler.typechecker.model.Method declaration) {
    if(declaration instanceof JavaMethod){
        ReflectionMethod methodMirror = (ReflectionMethod) ((JavaMethod) declaration).mirror;
        return (java.lang.reflect.Method) methodMirror.method;
    }
    if(declaration instanceof LazyMethod){
        ReflectionMethod methodMirror = (ReflectionMethod) ((LazyMethod) declaration).getMethodMirror();
        return (java.lang.reflect.Method) methodMirror.method;
    }
    throw new RuntimeException("Unsupported declaration type: " + declaration);
}

/**
 * Builds a runtime TypeDescriptor for a typechecker ProducedType, including
 * type arguments, member qualification, unions and intersections.
 */
public static TypeDescriptor getTypeDescriptorForProducedType(com.redhat.ceylon.compiler.typechecker.model.ProducedType type) {
    TypeDeclaration declaration = type.getDeclaration();
    if(declaration instanceof LazyClass){
        ReflectionClass classMirror = (ReflectionClass) ((LazyClass) declaration).classMirror;
        TypeDescriptor[] tdArgs = getTypeDescriptorsForProducedTypes(type.getTypeArgumentList());
        TypeDescriptor ret = TypeDescriptor.klass(classMirror.klass, tdArgs);
        if(type.getQualifyingType() != null)
            return TypeDescriptor.member(getTypeDescriptorForProducedType(type.getQualifyingType()), ret);
        return ret;
    }
    if(declaration instanceof LazyInterface){
        ReflectionClass classMirror = (ReflectionClass) ((LazyInterface) declaration).classMirror;
        TypeDescriptor[] tdArgs = getTypeDescriptorsForProducedTypes(type.getTypeArgumentList());
        TypeDescriptor ret = TypeDescriptor.klass(classMirror.klass, tdArgs);
        if(type.getQualifyingType() != null)
            return TypeDescriptor.member(getTypeDescriptorForProducedType(type.getQualifyingType()), ret);
        return ret;
    }
    if(declaration instanceof NothingType){
        return TypeDescriptor.NothingType;
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.UnionType){
        TypeDescriptor[] tdArgs = getTypeDescriptorsForProducedTypes(type.getCaseTypes());
        return TypeDescriptor.union(tdArgs);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.IntersectionType){
        TypeDescriptor[] tdArgs = getTypeDescriptorsForProducedTypes(type.getSatisfiedTypes());
        return TypeDescriptor.intersection(tdArgs);
    }
    throw new RuntimeException("Unsupported declaration type: " + declaration);
}

// Element-wise conversion of a list of ProducedTypes to TypeDescriptors.
private static TypeDescriptor[] getTypeDescriptorsForProducedTypes(List<ProducedType> args) {
    TypeDescriptor[] tdArgs = new TypeDescriptor[args.size()];
    for(int i=0;i<tdArgs.length;i++){
        tdArgs[i] = getTypeDescriptorForProducedType(args.get(i));
    }
    return tdArgs;
}

/**
 * Resolves the metamodel function declaration for a typechecker method by
 * looking it up by name in its containing class/interface or package.
 */
public static ceylon.language.meta.declaration.FunctionDeclaration getMetamodel(Method method) {
    // find its container
    Scope container = method.getContainer();
    if(container instanceof com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface){
        com.redhat.ceylon.compiler.java.runtime.metamodel.FreeClassOrInterface classOrInterface = (FreeClassOrInterface) getOrCreateMetamodel((com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface) container);
        // now find the method
        ceylon.language.meta.declaration.FunctionDeclaration ret = classOrInterface.findMethod(method.getName());
        if(ret == null)
            throw new RuntimeException("Failed to find method "+method.getName()+" in "+container);
        return ret;
    }
    if(container instanceof com.redhat.ceylon.compiler.typechecker.model.Package){
        ceylon.language.meta.declaration.Package pkg = getOrCreateMetamodel((com.redhat.ceylon.compiler.typechecker.model.Package) container);
        ceylon.language.meta.declaration.FunctionDeclaration ret = pkg.getFunction(method.getName());
        if(ret == null)
            throw new RuntimeException("Failed to find method "+method.getName()+" in "+container);
        return ret;
    }
    throw new RuntimeException("Unsupported method container for "+method.getName()+": "+container);
}

/** Typechecker model type backing an open-type metamodel wrapper. */
public static com.redhat.ceylon.compiler.typechecker.model.ProducedType getModel(ceylon.language.meta.declaration.OpenType pt) {
    if(pt instanceof FreeClassOrInterfaceType)
        return ((FreeClassOrInterfaceType)pt).producedType;
    throw new RuntimeException("Unsupported produced type: " + pt);
}

/** Typechecker model type backing an applied-type metamodel wrapper. */
public static com.redhat.ceylon.compiler.typechecker.model.ProducedType getModel(ceylon.language.meta.model.Type<?> pt) {
    if(pt instanceof AppliedClassOrInterface)
        return ((AppliedClassOrInterface<?>)pt).producedType;
    if(pt instanceof AppliedUnionType<?>)
        return ((AppliedUnionType<?>)pt).model;
    if(pt instanceof AppliedIntersectionType<?>)
        return ((AppliedIntersectionType<?>)pt).model;
    if(pt instanceof ceylon.language.meta.model.nothingType_)
        return new NothingType(moduleManager.getModelLoader().getUnit()).getType();
    throw new RuntimeException("Unsupported applied produced type: " + pt);
}

public static com.redhat.ceylon.compiler.typechecker.model.Package
getPackage(com.redhat.ceylon.compiler.typechecker.model.Declaration declaration) { Scope scope = declaration.getContainer(); while(scope != null && scope instanceof com.redhat.ceylon.compiler.typechecker.model.Package == false) scope = scope.getContainer(); if(scope == null) throw new RuntimeException("Declaration with no package: "+declaration); return (com.redhat.ceylon.compiler.typechecker.model.Package)scope; } public static java.util.List<com.redhat.ceylon.compiler.typechecker.model.ProducedType> getProducedTypes(Sequential<? extends ceylon.language.meta.model.Type<?>> types) { Iterator<?> iterator = types.iterator(); Object it; List<com.redhat.ceylon.compiler.typechecker.model.ProducedType> producedTypes = new LinkedList<com.redhat.ceylon.compiler.typechecker.model.ProducedType>(); while((it = iterator.next()) != finished_.get_()){ ceylon.language.meta.model.Type<?> pt = (ceylon.language.meta.model.Type<?>) it; com.redhat.ceylon.compiler.typechecker.model.ProducedType modelPt = Metamodel.getModel(pt); producedTypes.add(modelPt); } return producedTypes; } /** * returns the java.lang.Class of the given the Ceylon metamodel of * an annotation class. */ public static <Value extends ConstrainedAnnotation<? extends Value, ? extends Values, ? super ProgramElement>, Values, ProgramElement extends Annotated> Class<?> getReflectedAnnotationClass( ClassOrInterface<? extends ConstrainedAnnotation<? extends Value, ? extends Values, ? 
super ProgramElement>> annotationType) { FreeClassOrInterface freeClass; if (annotationType instanceof AppliedClassOrInterface) { freeClass = (FreeClassOrInterface)(annotationType.getDeclaration()); } else { freeClass = (FreeClassOrInterface)annotationType; } final Class<?> refAnnotationClass = getJavaClass(freeClass.declaration); return refAnnotationClass; } @SuppressWarnings("unchecked") private static <A extends ceylon.language.Annotation> void addAnnotation( SequenceBuilder<A> ceylonAnnotations, java.lang.annotation.Annotation jAnnotation, Predicates.Predicate<A> pred) { Class<? extends java.lang.annotation.Annotation> jAnnotationType = jAnnotation.annotationType(); if (pred != null && pred instanceof Predicates.AnnotationPredicate && !((Predicates.AnnotationPredicate<A>)pred).shouldInstantiate(jAnnotationType)) { return; } if (jAnnotationType.getAnnotation(Ceylon.class) == null) { // It's a Java annotation addProxyCeylonAnnotation(ceylonAnnotations, jAnnotation); return; } if (jAnnotationType.getName().endsWith("$annotations$")) { java.lang.annotation.Annotation[] jAnnotations; try { jAnnotations = (java.lang.annotation.Annotation[])jAnnotationType.getMethod("value").invoke(jAnnotation); } catch (ReflectiveOperationException e) { throw new RuntimeException("While unwrapping a sequenced annotation", e); } for (java.lang.annotation.Annotation wrapped : jAnnotations) { addAnnotation(ceylonAnnotations, wrapped, pred); } } else { // Find the annotation class String annotationName = jAnnotationType.getName(); if (!annotationName.endsWith("$annotation$")) { throw new RuntimeException(); } String className = annotationName.substring(0, annotationName.length() - "$annotation$".length()); java.lang.Class<A> annotationClass; try { annotationClass = (java.lang.Class<A>)Class.forName(className, false, jAnnotationType.getClassLoader()); } catch (ClassNotFoundException e) { throw new RuntimeException("Unable to find annotation class " + className + " for annotation type " + 
annotationName, e); } // Invoke it with the jAnnotation as the only argument try { Constructor<A> constructor = annotationClass.getDeclaredConstructor(jAnnotationType); constructor.setAccessible(true); A cAnnotation = constructor.newInstance(jAnnotation); if (pred.accept(cAnnotation)) { ceylonAnnotations.append(cAnnotation); } } catch (ReflectiveOperationException e) { throw new RuntimeException("While reflectively instantiating " + annotationClass, e); } } } private static void addProxyCeylonAnnotation( SequenceBuilder<? extends ceylon.language.Annotation> ceylonAnnotations, java.lang.annotation.Annotation jAnnotation) { Class<? extends java.lang.annotation.Annotation> jAnnotationType = jAnnotation.annotationType(); InvocationHandler handler = new InvocationHandler() { @Override public Object invoke(Object proxy, java.lang.reflect.Method method, Object[] args) throws Throwable { // TODO Auto-generated method stub // return null; } }; java.lang.reflect.Proxy.newProxyInstance(jAnnotationType.getClassLoader(), new Class[]{jAnnotationType, ceylon.language.Annotation.class}, handler); } public static <A extends ceylon.language.Annotation> Sequential<? extends A> annotations( TypeDescriptor $reifiedValues, Annotated annotated) { // TODO If the annotated is not a valid target for the annotationType // we can return empty immediately Predicates.Predicate<A> predicate = Predicates.isAnnotationOfType($reifiedValues); return annotations($reifiedValues, annotated, predicate); } public static <A extends ceylon.language.Annotation> Sequential<? 
extends A> annotations(TypeDescriptor $reifiedValues, Annotated annotated, Predicates.Predicate<A> predicate) { java.lang.annotation.Annotation[] jAnnotations = ((AnnotationBearing)annotated).$getJavaAnnotations$(); if (jAnnotations == null) { throw new RuntimeException("Unable to find java.lang.reflect.AnnotatedElement for " + annotated); } // TODO Fix initial size estimate when query for OptionalAnnotation SequenceBuilder<A> ceylonAnnotations = new SequenceBuilder<A>($reifiedValues, jAnnotations.length); for (java.lang.annotation.Annotation jAnnotation: jAnnotations) { addAnnotation(ceylonAnnotations, jAnnotation, predicate); } return ceylonAnnotations.getSequence(); } public static String getJavaMethodName(Method method) { // FIXME: introduce a damn interface for getRealName() if(method instanceof JavaMethod) return ((JavaMethod)method).getRealName(); else if(method instanceof LazyMethod){ return ((LazyMethod)method).getRealMethodName(); }else throw new RuntimeException("Function declaration type not supported yet: "+method); } public static int getFirstDefaultedParameter(List<Parameter> parameters) { int i = 0; for(Parameter param : parameters){ if(param.isDefaulted()){ return i; } i++; } return -1; } public static int getVariadicParameter(List<Parameter> parameters) { int i = 0; for(Parameter param : parameters){ if(param.isSequenced()){ return i; } i++; } return -1; } public static Sequential<? 
extends ceylon.language.meta.declaration.Module> getModuleList() {
    // FIXME: this probably needs synchronisation to avoid new modules loaded during traversal
    Set<com.redhat.ceylon.compiler.typechecker.model.Module> modules = moduleManager.getContext().getModules().getListOfModules();
    ceylon.language.meta.declaration.Module[] array = new ceylon.language.meta.declaration.Module[modules.size()];
    int i=0;
    for(com.redhat.ceylon.compiler.typechecker.model.Module module : modules){
        array[i++] = getOrCreateMetamodel(module);
    }
    return Util.sequentialInstance(Module.$TypeDescriptor$, array);
}

/** Metamodel of the already-loaded module with this name/version, or null. */
public static ceylon.language.meta.declaration.Module findLoadedModule(String name, String version) {
    // FIXME: this probably needs synchronisation to avoid new modules loaded during traversal
    com.redhat.ceylon.compiler.typechecker.model.Module module = moduleManager.findLoadedModule(name, version);
    return module != null ? getOrCreateMetamodel(module) : null;
}

/** Metamodel of the default module, or null if there is none. */
public static Module getDefaultModule() {
    com.redhat.ceylon.compiler.typechecker.model.Module module = moduleManager.getContext().getModules().getDefaultModule();
    return module != null ? getOrCreateMetamodel(module) : null;
}

/** Full types of the given parameters as seen through the produced reference. */
public static List<ProducedType> getParameterProducedTypes(List<Parameter> parameters, ProducedReference producedReference) {
    List<ProducedType> parameterProducedTypes = new ArrayList<ProducedType>(parameters.size());
    for(Parameter parameter : parameters){
        ProducedType ft = producedReference.getTypedParameter(parameter).getFullType();
        parameterProducedTypes.add(ft);
    }
    return parameterProducedTypes;
}

/** True if the lazy class/interface declaration originates from Ceylon code. */
public static boolean isCeylon(com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface declaration){
    if(declaration instanceof LazyClass)
        return ((LazyClass) declaration).isCeylon();
    if(declaration instanceof LazyInterface)
        return ((LazyInterface) declaration).isCeylon();
    throw new RuntimeException("Declaration type not supported: "+declaration);
}

/**
 * Type descriptor of the first parameter list, encoded as a tuple type;
 * NothingType for declarations without parameter lists.
 */
public static TypeDescriptor getTypeDescriptorForArguments(com.redhat.ceylon.compiler.typechecker.model.Unit unit,
        com.redhat.ceylon.compiler.typechecker.model.Functional decl, ProducedReference producedReference) {
    if(!decl.getParameterLists().isEmpty()){
        List<Parameter> parameters = decl.getParameterLists().get(0).getParameters();
        com.redhat.ceylon.compiler.typechecker.model.ProducedType tupleType = unit.getParameterTypesAsTupleType(parameters, producedReference);
        return Metamodel.getTypeDescriptorForProducedType(tupleType);
    }else{
        return TypeDescriptor.NothingType;
    }
}

/** Model-side variant of the above: the first parameter list as a tuple type. */
public static ProducedType getProducedTypeForArguments(com.redhat.ceylon.compiler.typechecker.model.Unit unit,
        com.redhat.ceylon.compiler.typechecker.model.Functional decl, ProducedReference producedReference) {
    if(!decl.getParameterLists().isEmpty()){
        List<Parameter> parameters = decl.getParameterLists().get(0).getParameters();
        return unit.getParameterTypesAsTupleType(parameters, producedReference);
    }else{
        return new NothingType(unit).getType();
    }
}

/**
 * This is also used by generated code in the JVM compiler, for type declaration literals.
 * In theory this can only be used for ClassOrInterface or TypeAlias.
 */
public static ceylon.language.meta.declaration.NestableDeclaration getOrCreateMetamodel(java.lang.Class<?> klass){
    // FIXME: is this really enough?
    String typeName = klass.getName();
    com.redhat.ceylon.compiler.typechecker.model.Module module = moduleManager.findModuleForClass(klass);
    com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration decl =
            (com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration) moduleManager.getModelLoader().getDeclaration(module, typeName, DeclarationType.TYPE);
    return (ceylon.language.meta.declaration.NestableDeclaration) getOrCreateMetamodel(decl);
}

/** Type descriptor for a function reference's return type. */
public static TypeDescriptor getTypeDescriptorForFunction(ProducedReference appliedFunction) {
    return getTypeDescriptorForProducedType(getFunctionReturnType(appliedFunction));
}

public static ProducedType getFunctionReturnType(ProducedReference appliedFunction) {
    // pull the return type out of the Callable
    ProducedType fullType = appliedFunction.getFullType();
    return fullType.getTypeArgumentList().get(0);
}

/** The initializer parameter of a method/value declaration, or null. */
public static com.redhat.ceylon.compiler.typechecker.model.Parameter getParameterFromTypedDeclaration(com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration declaration) {
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.MethodOrValue)
        return ((com.redhat.ceylon.compiler.typechecker.model.MethodOrValue) declaration).getInitializerParameter();
    return null;
}

/**
 * Called when an annotation class is instantiated via an annotation
 * constructor or annotation callsite to convert the String representation
 * of a Declaration literal back into the corresponding Declaration.
 */
@SuppressWarnings("unchecked")
public static <T extends ceylon.language.meta.declaration.Declaration> T parseMetamodelReference(String ref/*, java.lang.Class<?> klass*/) {
    DeclarationParser parser = new DeclarationParser();
    return (T)parser.ref(ref);
}

/**
 * Called when an annotation class is instantiated via an annotation
 * constructor or annotation callsite to convert an array of String representations
 * of Declaration literals back into a Sequential of Declarations.
 */
@SuppressWarnings("unchecked")
public static <T extends ceylon.language.meta.declaration.Declaration> Sequential<T> parseMetamodelReferences(TypeDescriptor $reifiedElement, String[] refs) {
    DeclarationParser parser = new DeclarationParser();
    ceylon.language.meta.declaration.Declaration[] array = new ceylon.language.meta.declaration.Declaration[refs.length];
    for (int ii = 0; ii < refs.length; ii++) {
        array[ii] = (T)parser.ref(refs[ii]);
    }
    return ArraySequence.instance($reifiedElement, array);
}

/** Resolves an enumerated (anonymous class) value by invoking its static getter. */
@SuppressWarnings("unchecked")
public static <T> T parseEnumerationReference(java.lang.Class<T> klass) {
    FreeClassOrInterface decl = (FreeClassOrInterface)getOrCreateMetamodel(klass);
    String getterName = Naming.getGetterName(decl.declaration);
    try {
        java.lang.reflect.Method method = klass.getMethod(getterName);
        return (T)method.invoke(null);
    } catch (ReflectiveOperationException e) {
        throw new RuntimeException(e);
    }
}

/** Array variant of parseEnumerationReference, packaged as a Sequential. */
public static <T> Sequential<T> parseEnumerationReferences(TypeDescriptor $reifiedElement, java.lang.Class<?>[] refs) {
    Object[] array = new Object[refs.length];
    for (int ii = 0; ii < refs.length; ii++) {
        array[ii] = parseEnumerationReference(refs[ii]);
    }
    return ArraySequence.instance($reifiedElement, array);
}

public static Sequential<?
extends ceylon.language.meta.declaration.TypeParameter> getTypeParameters(com.redhat.ceylon.compiler.typechecker.model.Generic declaration) { List<com.redhat.ceylon.compiler.typechecker.model.TypeParameter> typeParameters = declaration.getTypeParameters(); ceylon.language.meta.declaration.TypeParameter[] typeParametersArray = new ceylon.language.meta.declaration.TypeParameter[typeParameters.size()]; int i=0; for(com.redhat.ceylon.compiler.typechecker.model.TypeParameter tp : typeParameters){ typeParametersArray[i++] = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeTypeParameter(tp); } return Util.sequentialInstance(ceylon.language.meta.declaration.TypeParameter.$TypeDescriptor$, typeParametersArray); } @SuppressWarnings("hiding") public static <DeclarationType extends ceylon.language.meta.declaration.Declaration> DeclarationType findDeclarationByName(Sequential<? extends DeclarationType> declarations, String name) { Iterator<? extends DeclarationType> iterator = declarations.iterator(); Object it; while((it = iterator.next()) != finished_.get_()){ @SuppressWarnings("unchecked") DeclarationType tp = (DeclarationType) it; if(tp.getName().equals(name)) return tp; } return null; } public static AnnotatedDeclaration getContainer(Declaration declaration) { Scope container = declaration.getContainer(); if(container instanceof com.redhat.ceylon.compiler.typechecker.model.Declaration) return Metamodel.getOrCreateMetamodel((com.redhat.ceylon.compiler.typechecker.model.Declaration)container); if(container instanceof com.redhat.ceylon.compiler.typechecker.model.Package) return Metamodel.getOrCreateMetamodel((com.redhat.ceylon.compiler.typechecker.model.Package)container); // FIXME: can that happen? throw new RuntimeException("Illegal container type: "+container); } public static boolean isLocalType(com.redhat.ceylon.compiler.typechecker.model.Class decl) { return ((LazyElement)decl).isLocal(); } public static ceylon.language.Map<? 
extends ceylon.language.meta.declaration.TypeParameter, ? extends ceylon.language.meta.model.Type<?>> getTypeArguments(ceylon.language.meta.declaration.GenericDeclaration declaration, ProducedReference appliedFunction) { java.util.Map<ceylon.language.meta.declaration.TypeParameter, ceylon.language.meta.model.Type<?>> typeArguments = new LinkedHashMap<ceylon.language.meta.declaration.TypeParameter, ceylon.language.meta.model.Type<?>>(); Iterator<? extends ceylon.language.meta.declaration.TypeParameter> typeParameters = declaration.getTypeParameterDeclarations().iterator(); Object it; java.util.Map<com.redhat.ceylon.compiler.typechecker.model.TypeParameter, com.redhat.ceylon.compiler.typechecker.model.ProducedType> ptArguments = appliedFunction.getTypeArguments(); while((it = typeParameters.next()) != finished_.get_()){ com.redhat.ceylon.compiler.java.runtime.metamodel.FreeTypeParameter tp = (com.redhat.ceylon.compiler.java.runtime.metamodel.FreeTypeParameter) it; com.redhat.ceylon.compiler.typechecker.model.TypeParameter tpDecl = (com.redhat.ceylon.compiler.typechecker.model.TypeParameter) tp.declaration; com.redhat.ceylon.compiler.typechecker.model.ProducedType ptArg = ptArguments.get(tpDecl); ceylon.language.meta.model.Type<?> ptArgWrapped = Metamodel.getAppliedMetamodel(ptArg); typeArguments.put(tp, ptArgWrapped); } return new InternalMap<ceylon.language.meta.declaration.TypeParameter, ceylon.language.meta.model.Type<?>>(ceylon.language.meta.declaration.TypeParameter.$TypeDescriptor$, TypeDescriptor.klass(ceylon.language.meta.model.Type.class, ceylon.language.Anything.$TypeDescriptor$), typeArguments); } public static String toTypeString(ceylon.language.meta.declaration.NestableDeclaration declaration, ceylon.language.Map<? 
extends ceylon.language.meta.declaration.TypeParameter, ?> typeArguments){ StringBuffer string = new StringBuffer(); string.append(declaration.getName()); if(declaration instanceof ceylon.language.meta.declaration.GenericDeclaration) addTypeArguments(string, (ceylon.language.meta.declaration.GenericDeclaration)declaration, typeArguments); java.lang.Object container = declaration.getContainer(); while(container != null){ if(container instanceof Package) return ((Package)container).getName() + "::" + string; StringBuffer string2 = new StringBuffer(((NestableDeclaration)container).getName()); if(container instanceof ceylon.language.meta.declaration.GenericDeclaration) addTypeArguments(string2, (ceylon.language.meta.declaration.GenericDeclaration)container, typeArguments); string2.append("."); string.insert(0, string2.toString()); container = ((NestableDeclaration)container).getContainer(); } return string.toString(); } private static void addTypeArguments(StringBuffer string, ceylon.language.meta.declaration.GenericDeclaration declaration, ceylon.language.Map<? extends ceylon.language.meta.declaration.TypeParameter, ?> typeArguments) { if(!declaration.getTypeParameterDeclarations().getEmpty()){ string.append("<"); Iterator<?> iterator = declaration.getTypeParameterDeclarations().iterator(); Object it; boolean once = true; while((it = iterator.next()) != finished_.get_()){ if(once) once = false; else string.append(","); ceylon.language.meta.declaration.TypeParameter tpDecl = (ceylon.language.meta.declaration.TypeParameter) it; Object val = typeArguments != null ? typeArguments.get(tpDecl) : null; string.append(val != null ? 
val : "##Missing##"); } string.append(">"); } } public static String toTypeString(ceylon.language.meta.model.Model model){ StringBuffer string = new StringBuffer(); ceylon.language.meta.model.Type<?> container = model.getContainer(); if(container == null){ string.append(model.getDeclaration().getContainingPackage().getName()).append("::"); }else if(container instanceof ceylon.language.meta.model.ClassOrInterface<?>){ string.append(container.toString()).append("."); }else{ string.append("<").append(container.toString()).append(">."); } string.append(model.getDeclaration().getName()); if(model instanceof ceylon.language.meta.model.Generic) addTypeArguments(string, (ceylon.language.meta.declaration.GenericDeclaration) model.getDeclaration(), ((ceylon.language.meta.model.Generic)model).getTypeArguments()); return string.toString(); } public static void checkTypeArguments(ProducedType qualifyingType, Declaration declaration, List<ProducedType> typeArguments) { if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Generic){ List<com.redhat.ceylon.compiler.typechecker.model.TypeParameter> typeParameters = ((com.redhat.ceylon.compiler.typechecker.model.Generic) declaration).getTypeParameters(); if(typeParameters.size() < typeArguments.size()) throw new TypeApplicationException("Too many type arguments provided: "+typeArguments.size()+", but only accepts "+typeParameters.size()); int min = 0; for (TypeParameter tp: typeParameters) { if (!tp.isDefaulted()) min++; } if(typeArguments.size() < min){ String requires = (min == typeParameters.size()) ? 
"exactly" : "at least"; throw new TypeApplicationException("Not enough type arguments provided: "+typeArguments.size()+", but requires "+requires+" "+min); } for(int i=0;i<typeArguments.size();i++){ ProducedType typeArgument = typeArguments.get(i); com.redhat.ceylon.compiler.typechecker.model.TypeParameter typeParameter = typeParameters.get(i); for (ProducedType st: typeParameter.getSatisfiedTypes()) { ProducedType sts = st.getProducedType(qualifyingType, declaration, typeArguments); if (!typeArgument.isSubtypeOf(sts)) { throw new TypeApplicationException("Type argument "+i+": "+typeArgument.getProducedTypeQualifiedName() +" does not conform to upper bound constraint: "+sts.getProducedTypeQualifiedName() +" of type parameter "+typeParameter.getQualifiedNameString()); } } if(!ExpressionVisitor.argumentSatisfiesEnumeratedConstraint(qualifyingType, declaration, typeArguments, typeArgument, typeParameter)){ throw new TypeApplicationException("Type argument "+i+": "+typeArgument.getProducedTypeQualifiedName() +" does not conform to enumerated constraints " +" of type parameter "+typeParameter.getQualifiedNameString()); } } }else{ if(!typeArguments.isEmpty()) throw new TypeApplicationException("Declaration does not accept type arguments"); } } public static boolean isTypeOf(ProducedType producedType, Object instance) { ProducedType instanceType = Metamodel.getProducedType(instance); return instanceType.isSubtypeOf(producedType); } public static boolean isSuperTypeOf(ProducedType a, ceylon.language.meta.model.Type<? extends Object> type) { ProducedType b = Metamodel.getModel(type); return a.isSupertypeOf(b); } public static boolean isSubTypeOf(ProducedType a, ceylon.language.meta.model.Type<? extends Object> type) { ProducedType b = Metamodel.getModel(type); return a.isSubtypeOf(b); } public static boolean isExactly(ProducedType a, ceylon.language.meta.model.Type<? 
extends Object> type) { ProducedType b = Metamodel.getModel(type); return a.isExactly(b); } public static void checkReifiedTypeArgument(String methodName, String className, Variance variance, ProducedType appliedType, TypeDescriptor $reifiedType) { ProducedType expectedReifiedType = Metamodel.getProducedType($reifiedType); boolean check = checkReifiedTypeArgument(variance, appliedType, expectedReifiedType); if(!check){ String appliedTypeString = appliedType.getProducedTypeName(); String expectedReifiedTypeString = expectedReifiedType.getProducedTypeName(); String appliedString = className.replace("$1", appliedTypeString); String expectedString = className.replace("$1", expectedReifiedTypeString); throw new IncompatibleTypeException("Incompatible type: actual type of applied declaration is "+appliedString +" is not compatible with expected type: "+expectedString+". Try passing the type argument explicitly with: " +methodName+"<"+appliedTypeString+">()"); } } public static void checkReifiedTypeArgument(String methodName, String className, Variance variance1, ProducedType appliedType1, TypeDescriptor $reifiedType1, Variance variance2, ProducedType appliedType2, TypeDescriptor $reifiedType2) { ProducedType expectedReifiedType1 = Metamodel.getProducedType($reifiedType1); ProducedType expectedReifiedType2 = Metamodel.getProducedType($reifiedType2); boolean check1 = checkReifiedTypeArgument(variance1, appliedType1, expectedReifiedType1); boolean check2 = checkReifiedTypeArgument(variance2, appliedType2, expectedReifiedType2); if(!check1 || !check2){ String appliedTypeString1 = appliedType1.getProducedTypeName(); String expectedReifiedTypeString1 = expectedReifiedType1.getProducedTypeName(); String appliedTypeString2 = appliedType2.getProducedTypeName(); String expectedReifiedTypeString2 = expectedReifiedType2.getProducedTypeName(); String appliedString = className.replace("$1", appliedTypeString1).replace("$2", appliedTypeString2); String expectedString = 
className.replace("$1", expectedReifiedTypeString1).replace("$2", expectedReifiedTypeString2); throw new IncompatibleTypeException("Incompatible type: actual type of applied declaration is "+appliedString +" is not compatible with expected type: "+expectedString+". Try passing the type argument explicitly with: " +methodName+"<"+appliedTypeString1+","+appliedTypeString2+">()"); } } public static void checkReifiedTypeArgument(String methodName, String className, Variance variance1, ProducedType appliedType1, TypeDescriptor $reifiedType1, Variance variance2, ProducedType appliedType2, TypeDescriptor $reifiedType2, Variance variance3, ProducedType appliedType3, TypeDescriptor $reifiedType3) { ProducedType expectedReifiedType1 = Metamodel.getProducedType($reifiedType1); ProducedType expectedReifiedType2 = Metamodel.getProducedType($reifiedType2); ProducedType expectedReifiedType3 = Metamodel.getProducedType($reifiedType3); boolean check1 = checkReifiedTypeArgument(variance1, appliedType1, expectedReifiedType1); boolean check2 = checkReifiedTypeArgument(variance2, appliedType2, expectedReifiedType2); boolean check3 = checkReifiedTypeArgument(variance3, appliedType3, expectedReifiedType3); if(!check1 || !check2 || !check3){ String appliedTypeString1 = appliedType1.getProducedTypeName(); String expectedReifiedTypeString1 = expectedReifiedType1.getProducedTypeName(); String appliedTypeString2 = appliedType2.getProducedTypeName(); String expectedReifiedTypeString2 = expectedReifiedType2.getProducedTypeName(); String appliedTypeString3 = appliedType3.getProducedTypeName(); String expectedReifiedTypeString3 = expectedReifiedType3.getProducedTypeName(); String appliedString = className.replace("$1", appliedTypeString1).replace("$2", appliedTypeString2).replace("$3", appliedTypeString3); String expectedString = className.replace("$1", expectedReifiedTypeString1).replace("$2", expectedReifiedTypeString2).replace("$3", expectedReifiedTypeString3); throw new 
IncompatibleTypeException("Incompatible type: actual type of applied declaration is "+appliedString +" is not compatible with expected type: "+expectedString+". Try passing the type argument explicitly with: " +methodName+"<"+appliedTypeString1+","+appliedTypeString2+","+appliedTypeString3+">()"); } } private static boolean checkReifiedTypeArgument(Variance variance, ProducedType appliedType, ProducedType expectedReifiedType) { switch(variance){ case IN: return appliedType.isSupertypeOf(expectedReifiedType); case OUT: return appliedType.isSubtypeOf(expectedReifiedType); case NONE: return appliedType.isExactly(expectedReifiedType); default: throw new RuntimeException("Invalid variance: "+variance); } } public static void checkQualifyingType(ProducedType qualifyingType, Declaration declaration) { Scope container = declaration.getContainer(); if(container instanceof TypeDeclaration == false) throw new IncompatibleTypeException("Declaration container is not a type: "+container); TypeDeclaration typeDecl = (TypeDeclaration) container; ProducedType supertype = qualifyingType.getSupertype(typeDecl); if(supertype == null) throw new IncompatibleTypeException("Invalid container type: "+qualifyingType+" is not a subtype of "+typeDecl); } public static <Return> Return apply(Callable<? extends Return> function, Sequential<?> arguments, List<ProducedType> parameterProducedTypes, int firstDefaulted, int variadicIndex){ int argumentCount = (int) arguments.getSize(); int parameters = parameterProducedTypes.size(); // check minimum if(firstDefaulted == -1){ if(argumentCount < parameters) throw new InvocationException("Not enough arguments to function. Expected "+parameters+" but got only "+argumentCount); }else if(argumentCount < firstDefaulted) throw new InvocationException("Not enough arguments to function. 
Expected at least "+firstDefaulted+" but got only "+argumentCount); // check maximum if(variadicIndex == -1){ if(argumentCount > parameters) throw new InvocationException("To many arguments to function. Expected at most "+parameters+" but got "+argumentCount); }// if we're variadic we accept any number // now check their types Iterator<?> it = arguments.iterator(); Object arg; int i = 0; ProducedType variadicElement = null; if(variadicIndex != -1) // it must be a Sequential<T> variadicElement = parameterProducedTypes.get(variadicIndex).getTypeArgumentList().get(0); while((arg = it.next()) != finished_.get_()){ ProducedType parameterType = variadicIndex == -1 || i < variadicIndex ? // normal param parameterProducedTypes.get(i) // variadic param : variadicElement; ProducedType argumentType = Metamodel.getProducedType(arg); if(!argumentType.isSubtypeOf(parameterType)) throw new IncompatibleTypeException("Invalid argument "+i+", expected type "+parameterType+" but got "+argumentType); i++; } // they are all good, let's call it return Util.apply(function, arguments); } public static ceylon.language.meta.model.Model bind(ceylon.language.meta.model.Member<?,?> member, ProducedType containerType, Object container){ if(container == null) throw new IncompatibleTypeException("Invalid container "+container+", expected type "+containerType+" but got ceylon.language::Null"); ProducedType argumentType = Metamodel.getProducedType(container); if(!argumentType.isSubtypeOf(containerType)) throw new IncompatibleTypeException("Invalid container "+container+", expected type "+containerType+" but got "+argumentType); return member.$call$(container); } }
runtime/com/redhat/ceylon/compiler/java/runtime/metamodel/Metamodel.java
package com.redhat.ceylon.compiler.java.runtime.metamodel; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationHandler; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import ceylon.language.Anything; import ceylon.language.ArraySequence; import ceylon.language.Callable; import ceylon.language.Iterator; import ceylon.language.Null; import ceylon.language.SequenceBuilder; import ceylon.language.Sequential; import ceylon.language.empty_; import ceylon.language.finished_; import ceylon.language.Annotated; import ceylon.language.meta.model.ClassOrInterface; import ceylon.language.meta.model.IncompatibleTypeException; import ceylon.language.meta.model.InvocationException; import ceylon.language.ConstrainedAnnotation; import ceylon.language.meta.model.TypeApplicationException; import ceylon.language.meta.declaration.AnnotatedDeclaration; import ceylon.language.meta.declaration.Module; import ceylon.language.meta.declaration.Package; import ceylon.language.meta.declaration.NestableDeclaration; import com.redhat.ceylon.cmr.api.ArtifactResult; import com.redhat.ceylon.cmr.api.Logger; import com.redhat.ceylon.cmr.api.RepositoryManager; import com.redhat.ceylon.cmr.api.RepositoryManagerBuilder; import com.redhat.ceylon.compiler.java.Util; import com.redhat.ceylon.compiler.java.codegen.Naming; import com.redhat.ceylon.compiler.java.language.BooleanArray; import com.redhat.ceylon.compiler.java.language.ByteArray; import com.redhat.ceylon.compiler.java.language.CharArray; import com.redhat.ceylon.compiler.java.language.DoubleArray; import com.redhat.ceylon.compiler.java.language.FloatArray; import com.redhat.ceylon.compiler.java.language.IntArray; import com.redhat.ceylon.compiler.java.language.InternalMap; import com.redhat.ceylon.compiler.java.language.LongArray; import com.redhat.ceylon.compiler.java.language.ObjectArray; 
import com.redhat.ceylon.compiler.java.language.ShortArray; import com.redhat.ceylon.compiler.java.metadata.Ceylon; import com.redhat.ceylon.compiler.java.metadata.Variance; import com.redhat.ceylon.compiler.java.runtime.model.ReifiedType; import com.redhat.ceylon.compiler.java.runtime.model.RuntimeModuleManager; import com.redhat.ceylon.compiler.java.runtime.model.TypeDescriptor; import com.redhat.ceylon.compiler.loader.ModelLoader.DeclarationType; import com.redhat.ceylon.compiler.loader.impl.reflect.mirror.ReflectionClass; import com.redhat.ceylon.compiler.loader.impl.reflect.mirror.ReflectionMethod; import com.redhat.ceylon.compiler.loader.model.JavaMethod; import com.redhat.ceylon.compiler.loader.model.LazyClass; import com.redhat.ceylon.compiler.loader.model.LazyClassAlias; import com.redhat.ceylon.compiler.loader.model.LazyElement; import com.redhat.ceylon.compiler.loader.model.LazyInterface; import com.redhat.ceylon.compiler.loader.model.LazyMethod; import com.redhat.ceylon.compiler.loader.model.LazyPackage; import com.redhat.ceylon.compiler.loader.model.LazyValue; import com.redhat.ceylon.compiler.typechecker.analyzer.ExpressionVisitor; import com.redhat.ceylon.compiler.typechecker.context.Context; import com.redhat.ceylon.compiler.typechecker.io.VFS; import com.redhat.ceylon.compiler.typechecker.model.Declaration; import com.redhat.ceylon.compiler.typechecker.model.Functional; import com.redhat.ceylon.compiler.typechecker.model.Method; import com.redhat.ceylon.compiler.typechecker.model.NothingType; import com.redhat.ceylon.compiler.typechecker.model.Parameter; import com.redhat.ceylon.compiler.typechecker.model.ProducedReference; import com.redhat.ceylon.compiler.typechecker.model.ProducedType; import com.redhat.ceylon.compiler.typechecker.model.Scope; import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration; import com.redhat.ceylon.compiler.typechecker.model.TypeParameter; public class Metamodel { private static RuntimeModuleManager 
moduleManager; // FIXME: this will need better thinking in terms of memory usage private static Map<com.redhat.ceylon.compiler.typechecker.model.Declaration, com.redhat.ceylon.compiler.java.runtime.metamodel.FreeNestableDeclaration> typeCheckModelToRuntimeModel = new HashMap<com.redhat.ceylon.compiler.typechecker.model.Declaration, com.redhat.ceylon.compiler.java.runtime.metamodel.FreeNestableDeclaration>(); private static Map<com.redhat.ceylon.compiler.typechecker.model.Package, com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage> typeCheckPackagesToRuntimeModel = new HashMap<com.redhat.ceylon.compiler.typechecker.model.Package, com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage>(); private static Map<com.redhat.ceylon.compiler.typechecker.model.Module, com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule> typeCheckModulesToRuntimeModel = new HashMap<com.redhat.ceylon.compiler.typechecker.model.Module, com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule>(); static{ resetModuleManager(); } public static void loadModule(String name, String version, ArtifactResult result, ClassLoader classLoader){ moduleManager.loadModule(name, version, result, classLoader); } public static void resetModuleManager() { RepositoryManagerBuilder builder = new RepositoryManagerBuilder(new Logger(){ @Override public void error(String str) { System.err.println("ERROR: "+str); } @Override public void warning(String str) { System.err.println("WARN: "+str); } @Override public void info(String str) { System.err.println("INFO: "+str); } @Override public void debug(String str) { System.err.println("DEBUG: "+str); } }, false); RepositoryManager repoManager = builder.buildRepository(); VFS vfs = new VFS(); Context context = new Context(repoManager, vfs); moduleManager = new RuntimeModuleManager(context); moduleManager.initCoreModules(); moduleManager.prepareForTypeChecking(); typeCheckModelToRuntimeModel.clear(); typeCheckModulesToRuntimeModel.clear(); 
typeCheckPackagesToRuntimeModel.clear(); } public static Object getLock(){ return moduleManager.getModelLoader(); } public static TypeDescriptor getTypeDescriptor(Object instance) { if(instance == null) return Null.$TypeDescriptor$; else if(instance instanceof ReifiedType) return((ReifiedType) instance).$getType$(); else return getJavaTypeDescriptor(instance.getClass()); } private static TypeDescriptor getJavaArrayTypeDescriptor(Class<?> klass) { if(klass == byte[].class) return ByteArray.$TypeDescriptor$; if(klass == short[].class) return ShortArray.$TypeDescriptor$; if(klass == int[].class) return IntArray.$TypeDescriptor$; if(klass == long[].class) return LongArray.$TypeDescriptor$; if(klass == float[].class) return FloatArray.$TypeDescriptor$; if(klass == double[].class) return DoubleArray.$TypeDescriptor$; if(klass == boolean[].class) return BooleanArray.$TypeDescriptor$; if(klass == char[].class) return CharArray.$TypeDescriptor$; TypeDescriptor componentType = getJavaTypeDescriptor(klass.getComponentType()); return TypeDescriptor.klass(ObjectArray.class, componentType); } private static TypeDescriptor getJavaTypeDescriptor(Class<?> klass) { if(klass.isArray()) return getJavaArrayTypeDescriptor(klass); // make sure java.lang.Object doesn't leak in the ceylon metamodel if(klass == Object.class) return ceylon.language.Object.$TypeDescriptor$; return TypeDescriptor.klass(klass); } public static boolean isReified(java.lang.Object o, TypeDescriptor type){ TypeDescriptor instanceType = getTypeDescriptor(o); if(instanceType == null) return false; return instanceType.toProducedType(moduleManager).isSubtypeOf(type.toProducedType(moduleManager)); } public static ProducedType getProducedType(Object instance) { TypeDescriptor instanceType = getTypeDescriptor(instance); if(instanceType == null) throw new RuntimeException("Metamodel not yet supported for Java types"); return getProducedType(instanceType); } public static ProducedType getProducedType(TypeDescriptor 
reifiedType) { return reifiedType.toProducedType(moduleManager); } public static ceylon.language.meta.model.Type<?> getAppliedMetamodel(TypeDescriptor typeDescriptor) { if(typeDescriptor == null) throw new RuntimeException("Metamodel not yet supported for Java types"); ProducedType pt = typeDescriptor.toProducedType(moduleManager); return getAppliedMetamodel(pt); } public static com.redhat.ceylon.compiler.java.runtime.metamodel.FreeNestableDeclaration getOrCreateMetamodel(com.redhat.ceylon.compiler.typechecker.model.Declaration declaration){ synchronized(getLock()){ com.redhat.ceylon.compiler.java.runtime.metamodel.FreeNestableDeclaration ret = typeCheckModelToRuntimeModel.get(declaration); if(ret == null){ if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Class){ com.redhat.ceylon.compiler.typechecker.model.Class klass = (com.redhat.ceylon.compiler.typechecker.model.Class) declaration; ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeClass(klass); }else if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Interface){ com.redhat.ceylon.compiler.typechecker.model.Interface interf = (com.redhat.ceylon.compiler.typechecker.model.Interface)declaration; ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeInterface(interf); }else if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.TypeAlias){ com.redhat.ceylon.compiler.typechecker.model.TypeAlias alias = (com.redhat.ceylon.compiler.typechecker.model.TypeAlias)declaration; ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeAliasDeclaration(alias); }else if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Method){ com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration method = (com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration)declaration; ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeFunction(method); }else if(declaration instanceof 
com.redhat.ceylon.compiler.typechecker.model.Value){ com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration value = (com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration)declaration; ret = new FreeValue(value); }else{ throw new RuntimeException("Declaration type not supported yet: "+declaration); } typeCheckModelToRuntimeModel.put(declaration, ret); } return ret; } } public static boolean hasTypeParameters(com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration model) { if(model instanceof com.redhat.ceylon.compiler.typechecker.model.Generic) return hasTypeParameters((com.redhat.ceylon.compiler.typechecker.model.Generic)model); if(model.getContainer() instanceof com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface) return hasTypeParameters((com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface)model.getContainer()); return false; } public static boolean hasTypeParameters(com.redhat.ceylon.compiler.typechecker.model.Generic model) { if(!model.getTypeParameters().isEmpty()) return true; Object container = ((com.redhat.ceylon.compiler.typechecker.model.Declaration)model).getContainer(); if(container instanceof com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface) return hasTypeParameters((com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface) container); return false; } public static com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage getOrCreateMetamodel(com.redhat.ceylon.compiler.typechecker.model.Package declaration){ synchronized(getLock()){ com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage ret = typeCheckPackagesToRuntimeModel.get(declaration); if(ret == null){ ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreePackage(declaration); typeCheckPackagesToRuntimeModel.put(declaration, ret); } return ret; } } public static com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule getOrCreateMetamodel(com.redhat.ceylon.compiler.typechecker.model.Module declaration){ 
synchronized(getLock()){
    // Memoised: at most one FreeModule wrapper per typechecker module (cache guarded by getLock()).
    com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule ret = typeCheckModulesToRuntimeModel.get(declaration);
    if(ret == null){
        ret = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeModule(declaration);
        typeCheckModulesToRuntimeModel.put(declaration, ret);
    }
    return ret;
}
}

/**
 * Wraps a typechecker {@link ProducedType} in its open-type metamodel counterpart.
 * Dispatches on the declaration kind (class, interface, type parameter, union,
 * intersection, Nothing); any other kind is a hard error.
 */
public static ceylon.language.meta.declaration.OpenType getMetamodel(ProducedType pt) {
    TypeDeclaration declaration = pt.getDeclaration();
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Class){
        return new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeClassType(pt);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Interface){
        return new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeInterfaceType(pt);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.TypeParameter){
        com.redhat.ceylon.compiler.typechecker.model.TypeParameter tp = (com.redhat.ceylon.compiler.typechecker.model.TypeParameter) declaration;
        return new FreeTypeParameterType(tp);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.UnionType){
        return new FreeUnionType((com.redhat.ceylon.compiler.typechecker.model.UnionType)declaration);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.IntersectionType){
        return new FreeIntersectionType((com.redhat.ceylon.compiler.typechecker.model.IntersectionType)declaration);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.NothingType){
        return ceylon.language.meta.declaration.nothingType_.get_();
    }
    throw new RuntimeException("Declaration type not supported yet: "+declaration);
}

/**
 * Maps each ProducedType to its open-type metamodel (via {@link #getMetamodel(ProducedType)})
 * and packs the results into a Ceylon Sequential. Empty input short-circuits to the empty sequence.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public static Sequential<? extends ceylon.language.meta.declaration.OpenType> getMetamodelSequential(List<ProducedType> types) {
    if(types.isEmpty())
        return (Sequential<? extends ceylon.language.meta.declaration.OpenType>)(Sequential)empty_.get_();
    ceylon.language.meta.declaration.OpenType[] ret = new ceylon.language.meta.declaration.OpenType[types.size()];
    int i=0;
    for(ProducedType pt : types){
        ret[i++] = Metamodel.getMetamodel(pt);
    }
    return Util.sequentialInstance(ceylon.language.meta.declaration.OpenType.$TypeDescriptor$, ret);
}

/**
 * Applied-type analogue of {@link #getMetamodelSequential}: maps each ProducedType
 * to a closed (applied) metamodel Type and returns them as a Sequential.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public static Sequential<? extends ceylon.language.meta.model.Type<? extends Object>> getAppliedMetamodelSequential(List<ProducedType> types) {
    if(types.isEmpty())
        return (Sequential<? extends ceylon.language.meta.model.Type<? extends Object>>)(Sequential)empty_.get_();
    ceylon.language.meta.model.Type<?>[] ret = new ceylon.language.meta.model.Type[types.size()];
    int i=0;
    for(ProducedType pt : types){
        ret[i++] = Metamodel.getAppliedMetamodel(pt);
    }
    return Util.sequentialInstance(TypeDescriptor.klass(ceylon.language.meta.model.Type.class, Anything.$TypeDescriptor$), ret);
}

/**
 * Wraps a ProducedType in its applied (closed) metamodel counterpart.
 * Classes and interfaces additionally distinguish toplevel vs. member forms
 * (member forms carry the qualifying type's descriptor).
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static <T> ceylon.language.meta.model.Type<T> getAppliedMetamodel(ProducedType pt) {
    TypeDeclaration declaration = pt.getDeclaration();
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Class){
        // anonymous classes don't have parameter lists
        TypeDescriptor reifiedArguments;
        if(!declaration.isAnonymous() && !isLocalType((com.redhat.ceylon.compiler.typechecker.model.Class)declaration))
            reifiedArguments = Metamodel.getTypeDescriptorForArguments(declaration.getUnit(), (Functional)declaration, pt);
        else
            reifiedArguments = TypeDescriptor.NothingType;
        TypeDescriptor reifiedType = getTypeDescriptorForProducedType(pt);
        if(declaration.isToplevel())
            return new com.redhat.ceylon.compiler.java.runtime.metamodel.AppliedClass(reifiedType, reifiedArguments, pt, null, null);
        TypeDescriptor reifiedContainer = getTypeDescriptorForProducedType(pt.getQualifyingType());
        return new com.redhat.ceylon.compiler.java.runtime.metamodel.AppliedMemberClass(reifiedContainer, reifiedType, reifiedArguments, pt);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Interface){
        TypeDescriptor reifiedType = getTypeDescriptorForProducedType(pt);
        if(declaration.isToplevel())
            return new com.redhat.ceylon.compiler.java.runtime.metamodel.AppliedInterface<T>(reifiedType, pt, null, null);
        TypeDescriptor reifiedContainer = getTypeDescriptorForProducedType(pt.getQualifyingType());
        return new com.redhat.ceylon.compiler.java.runtime.metamodel.AppliedMemberInterface(reifiedContainer, reifiedType, pt);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.UnionType){
        TypeDescriptor reifiedType = getTypeDescriptorForProducedType(pt);
        return new AppliedUnionType<T>(reifiedType, (com.redhat.ceylon.compiler.typechecker.model.UnionType)declaration);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.IntersectionType){
        TypeDescriptor reifiedType = getTypeDescriptorForProducedType(pt);
        return new AppliedIntersectionType<T>(reifiedType, (com.redhat.ceylon.compiler.typechecker.model.IntersectionType)declaration);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.NothingType){
        return (ceylon.language.meta.model.Type<T>)ceylon.language.meta.model.nothingType_.get_();
    }
    throw new RuntimeException("Declaration type not supported yet: "+declaration);
}

/**
 * Looks up the generated {@code module_} class backing a Ceylon module.
 * NOTE(review): assumes the mirror is always found — a missing mirror would NPE here.
 */
public static java.lang.Class<?> getJavaClass(com.redhat.ceylon.compiler.typechecker.model.Module module) {
    String className = module.getNameAsString() + ".module_";
    ReflectionClass classMirror = (ReflectionClass)moduleManager.getModelLoader().lookupClassMirror(module, className);
    return classMirror.klass;
}

/**
 * Looks up the generated {@code package_} class backing a Ceylon package,
 * or null if there is no such class (unlike the Module overload, this tolerates a missing mirror).
 */
public static java.lang.Class<?> getJavaClass(com.redhat.ceylon.compiler.typechecker.model.Package pkg) {
    String className = ((LazyPackage) pkg).getNameAsString()+ ".package_";
    ReflectionClass classMirror =
(ReflectionClass)moduleManager.getModelLoader().lookupClassMirror(pkg.getModule(), className);
    return classMirror != null ? classMirror.klass : null;
}

/**
 * Resolves the Java class that backs a model declaration by unwrapping its
 * reflection mirror; falls back to the declaration's container for declarations
 * (e.g. locals) that carry no mirror of their own.
 */
public static java.lang.Class<?> getJavaClass(com.redhat.ceylon.compiler.typechecker.model.Declaration declaration) {
    if(declaration instanceof LazyClass){
        ReflectionClass classMirror = (ReflectionClass) ((LazyClass) declaration).classMirror;
        return classMirror.klass;
    }
    if(declaration instanceof LazyInterface){
        ReflectionClass classMirror = (ReflectionClass) ((LazyInterface) declaration).classMirror;
        return classMirror.klass;
    }
    if(declaration instanceof LazyMethod){
        ReflectionClass classMirror = (ReflectionClass) ((LazyMethod) declaration).classMirror;
        return classMirror.klass;
    }
    if(declaration instanceof LazyValue){
        ReflectionClass classMirror = (ReflectionClass) ((LazyValue) declaration).classMirror;
        return classMirror.klass;
    }
    if (declaration instanceof LazyClassAlias) {
        ReflectionClass classMirror = (ReflectionClass) ((LazyClassAlias) declaration).classMirror;
        return classMirror.klass;
    }
    // no mirror on this declaration: defer to its container
    if(declaration.getContainer() instanceof com.redhat.ceylon.compiler.typechecker.model.Declaration){
        return getJavaClass((com.redhat.ceylon.compiler.typechecker.model.Declaration)declaration.getContainer());
    }
    throw new RuntimeException("Unsupported declaration type: " + declaration);
}

/**
 * Resolves the java.lang.reflect.Method backing a Ceylon method model
 * (JavaMethod carries its mirror directly; LazyMethod resolves it lazily).
 */
public static java.lang.reflect.Method getJavaMethod(com.redhat.ceylon.compiler.typechecker.model.Method declaration) {
    if(declaration instanceof JavaMethod){
        ReflectionMethod methodMirror = (ReflectionMethod) ((JavaMethod) declaration).mirror;
        return (java.lang.reflect.Method) methodMirror.method;
    }
    if(declaration instanceof LazyMethod){
        ReflectionMethod methodMirror = (ReflectionMethod) ((LazyMethod) declaration).getMethodMirror();
        return (java.lang.reflect.Method) methodMirror.method;
    }
    throw new RuntimeException("Unsupported declaration type: " + declaration);
}

/**
 * Translates a ProducedType into the runtime {@link TypeDescriptor} representation.
 * Class/interface types become klass descriptors (wrapped in a member descriptor when
 * qualified); unions/intersections recurse over their case/satisfied types.
 */
public static TypeDescriptor getTypeDescriptorForProducedType(com.redhat.ceylon.compiler.typechecker.model.ProducedType type) {
    TypeDeclaration declaration = type.getDeclaration();
    if(declaration instanceof LazyClass){
        ReflectionClass classMirror = (ReflectionClass) ((LazyClass) declaration).classMirror;
        TypeDescriptor[] tdArgs = getTypeDescriptorsForProducedTypes(type.getTypeArgumentList());
        TypeDescriptor ret = TypeDescriptor.klass(classMirror.klass, tdArgs);
        if(type.getQualifyingType() != null)
            return TypeDescriptor.member(getTypeDescriptorForProducedType(type.getQualifyingType()), ret);
        return ret;
    }
    if(declaration instanceof LazyInterface){
        ReflectionClass classMirror = (ReflectionClass) ((LazyInterface) declaration).classMirror;
        TypeDescriptor[] tdArgs = getTypeDescriptorsForProducedTypes(type.getTypeArgumentList());
        TypeDescriptor ret = TypeDescriptor.klass(classMirror.klass, tdArgs);
        if(type.getQualifyingType() != null)
            return TypeDescriptor.member(getTypeDescriptorForProducedType(type.getQualifyingType()), ret);
        return ret;
    }
    if(declaration instanceof NothingType){
        return TypeDescriptor.NothingType;
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.UnionType){
        TypeDescriptor[] tdArgs = getTypeDescriptorsForProducedTypes(type.getCaseTypes());
        return TypeDescriptor.union(tdArgs);
    }
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.IntersectionType){
        TypeDescriptor[] tdArgs = getTypeDescriptorsForProducedTypes(type.getSatisfiedTypes());
        return TypeDescriptor.intersection(tdArgs);
    }
    throw new RuntimeException("Unsupported declaration type: " + declaration);
}

// Element-wise translation of a list of ProducedTypes into TypeDescriptors.
private static TypeDescriptor[] getTypeDescriptorsForProducedTypes(List<ProducedType> args) {
    TypeDescriptor[] tdArgs = new TypeDescriptor[args.size()];
    for(int i=0;i<tdArgs.length;i++){
        tdArgs[i] = getTypeDescriptorForProducedType(args.get(i));
    }
    return tdArgs;
}

/**
 * Finds the FunctionDeclaration metamodel for a typechecker Method by asking
 * its container (class/interface or package) for a member of the same name.
 */
public static ceylon.language.meta.declaration.FunctionDeclaration getMetamodel(Method method) {
    // find its container
    Scope
container = method.getContainer();
    if(container instanceof com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface){
        com.redhat.ceylon.compiler.java.runtime.metamodel.FreeClassOrInterface classOrInterface = (FreeClassOrInterface) getOrCreateMetamodel((com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface) container);
        // now find the method
        ceylon.language.meta.declaration.FunctionDeclaration ret = classOrInterface.findMethod(method.getName());
        if(ret == null)
            throw new RuntimeException("Failed to find method "+method.getName()+" in "+container);
        return ret;
    }
    if(container instanceof com.redhat.ceylon.compiler.typechecker.model.Package){
        ceylon.language.meta.declaration.Package pkg = getOrCreateMetamodel((com.redhat.ceylon.compiler.typechecker.model.Package) container);
        ceylon.language.meta.declaration.FunctionDeclaration ret = pkg.getFunction(method.getName());
        if(ret == null)
            throw new RuntimeException("Failed to find method "+method.getName()+" in "+container);
        return ret;
    }
    throw new RuntimeException("Unsupported method container for "+method.getName()+": "+container);
}

/**
 * Unwraps an open-type metamodel back to its underlying typechecker ProducedType.
 * Only class/interface open types are supported.
 */
public static com.redhat.ceylon.compiler.typechecker.model.ProducedType getModel(ceylon.language.meta.declaration.OpenType pt) {
    if(pt instanceof FreeClassOrInterfaceType)
        return ((FreeClassOrInterfaceType)pt).producedType;
    throw new RuntimeException("Unsupported produced type: " + pt);
}

/**
 * Unwraps an applied metamodel Type back to its underlying ProducedType.
 * Handles class/interface, union, intersection and the Nothing singleton.
 */
public static com.redhat.ceylon.compiler.typechecker.model.ProducedType getModel(ceylon.language.meta.model.Type<?> pt) {
    if(pt instanceof AppliedClassOrInterface)
        return ((AppliedClassOrInterface<?>)pt).producedType;
    if(pt instanceof AppliedUnionType<?>)
        return ((AppliedUnionType<?>)pt).model;
    if(pt instanceof AppliedIntersectionType<?>)
        return ((AppliedIntersectionType<?>)pt).model;
    if(pt instanceof ceylon.language.meta.model.nothingType_)
        return new NothingType(moduleManager.getModelLoader().getUnit()).getType();
    throw new RuntimeException("Unsupported applied produced type: " + pt);
}

/**
 * Walks up the container chain of a declaration until the enclosing Package is reached.
 * A declaration outside any package is a hard error.
 */
public static com.redhat.ceylon.compiler.typechecker.model.Package getPackage(com.redhat.ceylon.compiler.typechecker.model.Declaration declaration) {
    Scope scope = declaration.getContainer();
    while(scope != null && scope instanceof com.redhat.ceylon.compiler.typechecker.model.Package == false)
        scope = scope.getContainer();
    if(scope == null)
        throw new RuntimeException("Declaration with no package: "+declaration);
    return (com.redhat.ceylon.compiler.typechecker.model.Package)scope;
}

/**
 * Unwraps each metamodel Type in a Ceylon Sequential (iterated until the
 * {@code finished} sentinel) into its typechecker ProducedType.
 */
public static java.util.List<com.redhat.ceylon.compiler.typechecker.model.ProducedType> getProducedTypes(Sequential<? extends ceylon.language.meta.model.Type<?>> types) {
    Iterator<?> iterator = types.iterator();
    Object it;
    List<com.redhat.ceylon.compiler.typechecker.model.ProducedType> producedTypes = new LinkedList<com.redhat.ceylon.compiler.typechecker.model.ProducedType>();
    while((it = iterator.next()) != finished_.get_()){
        ceylon.language.meta.model.Type<?> pt = (ceylon.language.meta.model.Type<?>) it;
        com.redhat.ceylon.compiler.typechecker.model.ProducedType modelPt = Metamodel.getModel(pt);
        producedTypes.add(modelPt);
    }
    return producedTypes;
}

/**
 * Returns the java.lang.Class backing the given Ceylon metamodel of
 * an annotation class.
 */
public static <Value extends ConstrainedAnnotation<? extends Value, ? extends Values, ? super ProgramElement>, Values, ProgramElement extends Annotated>
Class<?> getReflectedAnnotationClass(
        ClassOrInterface<? extends ConstrainedAnnotation<? extends Value, ? extends Values, ?
super ProgramElement>> annotationType) { FreeClassOrInterface freeClass; if (annotationType instanceof AppliedClassOrInterface) { freeClass = (FreeClassOrInterface)(annotationType.getDeclaration()); } else { freeClass = (FreeClassOrInterface)annotationType; } final Class<?> refAnnotationClass = getJavaClass(freeClass.declaration); return refAnnotationClass; } @SuppressWarnings("unchecked") private static <A extends ceylon.language.Annotation> void addAnnotation( SequenceBuilder<A> ceylonAnnotations, java.lang.annotation.Annotation jAnnotation, Predicates.Predicate<A> pred) { Class<? extends java.lang.annotation.Annotation> jAnnotationType = jAnnotation.annotationType(); if (pred != null && pred instanceof Predicates.AnnotationPredicate && !((Predicates.AnnotationPredicate<A>)pred).shouldInstantiate(jAnnotationType)) { return; } if (jAnnotationType.getAnnotation(Ceylon.class) == null) { // It's a Java annotation addProxyCeylonAnnotation(ceylonAnnotations, jAnnotation); return; } if (jAnnotationType.getName().endsWith("$annotations$")) { java.lang.annotation.Annotation[] jAnnotations; try { jAnnotations = (java.lang.annotation.Annotation[])jAnnotationType.getMethod("value").invoke(jAnnotation); } catch (ReflectiveOperationException e) { throw new RuntimeException("While unwrapping a sequenced annotation", e); } for (java.lang.annotation.Annotation wrapped : jAnnotations) { addAnnotation(ceylonAnnotations, wrapped, pred); } } else { // Find the annotation class String annotationName = jAnnotationType.getName(); if (!annotationName.endsWith("$annotation$")) { throw new RuntimeException(); } String className = annotationName.substring(0, annotationName.length() - "$annotation$".length()); java.lang.Class<A> annotationClass; try { annotationClass = (java.lang.Class<A>)Class.forName(className, false, jAnnotationType.getClassLoader()); } catch (ClassNotFoundException e) { throw new RuntimeException("Unable to find annotation class " + className + " for annotation type " + 
annotationName, e); } // Invoke it with the jAnnotation as the only argument try { Constructor<A> constructor = annotationClass.getDeclaredConstructor(jAnnotationType); constructor.setAccessible(true); A cAnnotation = constructor.newInstance(jAnnotation); if (pred.accept(cAnnotation)) { ceylonAnnotations.append(cAnnotation); } } catch (ReflectiveOperationException e) { throw new RuntimeException("While reflectively instantiating " + annotationClass, e); } } } private static void addProxyCeylonAnnotation( SequenceBuilder<? extends ceylon.language.Annotation> ceylonAnnotations, java.lang.annotation.Annotation jAnnotation) { Class<? extends java.lang.annotation.Annotation> jAnnotationType = jAnnotation.annotationType(); InvocationHandler handler = new InvocationHandler() { @Override public Object invoke(Object proxy, java.lang.reflect.Method method, Object[] args) throws Throwable { // TODO Auto-generated method stub // return null; } }; java.lang.reflect.Proxy.newProxyInstance(jAnnotationType.getClassLoader(), new Class[]{jAnnotationType, ceylon.language.Annotation.class}, handler); } public static <A extends ceylon.language.Annotation> Sequential<? extends A> annotations( TypeDescriptor $reifiedValues, Annotated annotated) { // TODO If the annotated is not a valid target for the annotationType // we can return empty immediately Predicates.Predicate<A> predicate = Predicates.isAnnotationOfType($reifiedValues); return annotations($reifiedValues, annotated, predicate); } public static <A extends ceylon.language.Annotation> Sequential<? 
extends A> annotations(TypeDescriptor $reifiedValues, Annotated annotated, Predicates.Predicate<A> predicate) {
    java.lang.annotation.Annotation[] jAnnotations = ((AnnotationBearing)annotated).$getJavaAnnotations$();
    if (jAnnotations == null) {
        throw new RuntimeException("Unable to find java.lang.reflect.AnnotatedElement for " + annotated);
    }
    // TODO Fix initial size estimate when query for OptionalAnnotation
    SequenceBuilder<A> ceylonAnnotations = new SequenceBuilder<A>($reifiedValues, jAnnotations.length);
    for (java.lang.annotation.Annotation jAnnotation: jAnnotations) {
        addAnnotation(ceylonAnnotations, jAnnotation, predicate);
    }
    return ceylonAnnotations.getSequence();
}

/**
 * Returns the real (bytecode-level) name of the Java method backing a Ceylon method.
 */
public static String getJavaMethodName(Method method) {
    // FIXME: introduce a damn interface for getRealName()
    if(method instanceof JavaMethod)
        return ((JavaMethod)method).getRealName();
    else if(method instanceof LazyMethod){
        return ((LazyMethod)method).getRealMethodName();
    }else
        throw new RuntimeException("Function declaration type not supported yet: "+method);
}

/**
 * Index of the first defaulted parameter in the list, or -1 if none is defaulted.
 */
public static int getFirstDefaultedParameter(List<Parameter> parameters) {
    int i = 0;
    for(Parameter param : parameters){
        if(param.isDefaulted()){
            return i;
        }
        i++;
    }
    return -1;
}

/**
 * Index of the variadic (sequenced) parameter in the list, or -1 if there is none.
 */
public static int getVariadicParameter(List<Parameter> parameters) {
    int i = 0;
    for(Parameter param : parameters){
        if(param.isSequenced()){
            return i;
        }
        i++;
    }
    return -1;
}

/**
 * Snapshots every currently loaded module as a metamodel Sequential.
 */
public static Sequential<? extends ceylon.language.meta.declaration.Module> getModuleList() {
    // FIXME: this probably needs synchronisation to avoid new modules loaded during traversal
    Set<com.redhat.ceylon.compiler.typechecker.model.Module> modules = moduleManager.getContext().getModules().getListOfModules();
    ceylon.language.meta.declaration.Module[] array = new ceylon.language.meta.declaration.Module[modules.size()];
    int i=0;
    for(com.redhat.ceylon.compiler.typechecker.model.Module module : modules){
        array[i++] = getOrCreateMetamodel(module);
    }
    return Util.sequentialInstance(Module.$TypeDescriptor$, array);
}

/**
 * Looks up an already-loaded module by name/version; null when not loaded.
 */
public static ceylon.language.meta.declaration.Module findLoadedModule(String name, String version) {
    // FIXME: this probably needs synchronisation to avoid new modules loaded during traversal
    com.redhat.ceylon.compiler.typechecker.model.Module module = moduleManager.findLoadedModule(name, version);
    return module != null ? getOrCreateMetamodel(module) : null;
}

/**
 * The default module's metamodel, or null when the context has none.
 */
public static Module getDefaultModule() {
    com.redhat.ceylon.compiler.typechecker.model.Module module = moduleManager.getContext().getModules().getDefaultModule();
    return module != null ?
getOrCreateMetamodel(module) : null;
}

/**
 * Resolves each parameter's full type against the given ProducedReference
 * (so type arguments are substituted) and returns them in parameter order.
 */
public static List<ProducedType> getParameterProducedTypes(List<Parameter> parameters, ProducedReference producedReference) {
    List<ProducedType> parameterProducedTypes = new ArrayList<ProducedType>(parameters.size());
    for(Parameter parameter : parameters){
        ProducedType ft = producedReference.getTypedParameter(parameter).getFullType();
        parameterProducedTypes.add(ft);
    }
    return parameterProducedTypes;
}

/**
 * True when the class/interface was compiled from Ceylon source (as opposed to
 * being a plain Java type), as reported by its lazy model wrapper.
 */
public static boolean isCeylon(com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface declaration){
    if(declaration instanceof LazyClass)
        return ((LazyClass) declaration).isCeylon();
    if(declaration instanceof LazyInterface)
        return ((LazyInterface) declaration).isCeylon();
    throw new RuntimeException("Declaration type not supported: "+declaration);
}

/**
 * TypeDescriptor for a callable's argument tuple: the first parameter list is
 * folded into a tuple type and translated; no parameter list yields NothingType.
 */
public static TypeDescriptor getTypeDescriptorForArguments(com.redhat.ceylon.compiler.typechecker.model.Unit unit,
        com.redhat.ceylon.compiler.typechecker.model.Functional decl, ProducedReference producedReference) {
    if(!decl.getParameterLists().isEmpty()){
        List<Parameter> parameters = decl.getParameterLists().get(0).getParameters();
        com.redhat.ceylon.compiler.typechecker.model.ProducedType tupleType = unit.getParameterTypesAsTupleType(parameters, producedReference);
        return Metamodel.getTypeDescriptorForProducedType(tupleType);
    }else{
        return TypeDescriptor.NothingType;
    }
}

/**
 * ProducedType counterpart of {@link #getTypeDescriptorForArguments}: the
 * argument tuple type itself, or Nothing when there is no parameter list.
 */
public static ProducedType getProducedTypeForArguments(com.redhat.ceylon.compiler.typechecker.model.Unit unit,
        com.redhat.ceylon.compiler.typechecker.model.Functional decl, ProducedReference producedReference) {
    if(!decl.getParameterLists().isEmpty()){
        List<Parameter> parameters = decl.getParameterLists().get(0).getParameters();
        return unit.getParameterTypesAsTupleType(parameters, producedReference);
    }else{
        return new NothingType(unit).getType();
    }
}

/**
 * This is also used by generated code in the JVM compiler, for type declaration literals.
 * In theory this can only be used for ClassOrInterface or TypeAlias.
*/
public static ceylon.language.meta.declaration.NestableDeclaration getOrCreateMetamodel(java.lang.Class<?> klass){
    // FIXME: is this really enough?
    String typeName = klass.getName();
    com.redhat.ceylon.compiler.typechecker.model.Module module = moduleManager.findModuleForClass(klass);
    com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration decl =
            (com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration)
            moduleManager.getModelLoader().getDeclaration(module, typeName, DeclarationType.TYPE);
    return (ceylon.language.meta.declaration.NestableDeclaration) getOrCreateMetamodel(decl);
}

/**
 * TypeDescriptor of a function's return type, extracted from its applied reference.
 */
public static TypeDescriptor getTypeDescriptorForFunction(ProducedReference appliedFunction) {
    return getTypeDescriptorForProducedType(getFunctionReturnType(appliedFunction));
}

/**
 * Return type of a function reference: the first type argument of its Callable full type.
 */
public static ProducedType getFunctionReturnType(ProducedReference appliedFunction) {
    // pull the return type out of the Callable
    ProducedType fullType = appliedFunction.getFullType();
    return fullType.getTypeArgumentList().get(0);
}

/**
 * The Parameter a typed declaration initialises, or null when it is not a parameter.
 */
public static com.redhat.ceylon.compiler.typechecker.model.Parameter getParameterFromTypedDeclaration(com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration declaration) {
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.MethodOrValue)
        return ((com.redhat.ceylon.compiler.typechecker.model.MethodOrValue) declaration).getInitializerParameter();
    return null;
}

/**
 * Called when an annotation class is instantiated via an annotation
 * constructor or annotation callsite to convert the String representation
 * of a Declaration literal back into the corresponding Declaration.
extends ceylon.language.meta.declaration.TypeParameter, ? extends ceylon.language.meta.model.Type<?>> getTypeArguments(ceylon.language.meta.declaration.GenericDeclaration declaration,
        ProducedReference appliedFunction) {
    // insertion-ordered so the map iterates in declaration order of the type parameters
    java.util.Map<ceylon.language.meta.declaration.TypeParameter, ceylon.language.meta.model.Type<?>> typeArguments
        = new LinkedHashMap<ceylon.language.meta.declaration.TypeParameter, ceylon.language.meta.model.Type<?>>();
    Iterator<? extends ceylon.language.meta.declaration.TypeParameter> typeParameters = declaration.getTypeParameterDeclarations().iterator();
    Object it;
    java.util.Map<com.redhat.ceylon.compiler.typechecker.model.TypeParameter, com.redhat.ceylon.compiler.typechecker.model.ProducedType> ptArguments
        = appliedFunction.getTypeArguments();
    while((it = typeParameters.next()) != finished_.get_()){
        com.redhat.ceylon.compiler.java.runtime.metamodel.FreeTypeParameter tp = (com.redhat.ceylon.compiler.java.runtime.metamodel.FreeTypeParameter) it;
        com.redhat.ceylon.compiler.typechecker.model.TypeParameter tpDecl = (com.redhat.ceylon.compiler.typechecker.model.TypeParameter) tp.declaration;
        com.redhat.ceylon.compiler.typechecker.model.ProducedType ptArg = ptArguments.get(tpDecl);
        ceylon.language.meta.model.Type<?> ptArgWrapped = Metamodel.getAppliedMetamodel(ptArg);
        typeArguments.put(tp, ptArgWrapped);
    }
    return new InternalMap<ceylon.language.meta.declaration.TypeParameter, ceylon.language.meta.model.Type<?>>(
            ceylon.language.meta.declaration.TypeParameter.$TypeDescriptor$,
            TypeDescriptor.klass(ceylon.language.meta.model.Type.class, ceylon.language.Anything.$TypeDescriptor$),
            typeArguments);
}

/**
 * Renders a qualified name for a declaration ("pkg::Outer<T>.Inner<U>" style),
 * walking containers outwards and appending type arguments at each generic level.
 */
public static String toTypeString(ceylon.language.meta.declaration.NestableDeclaration declaration,
        ceylon.language.Map<? extends ceylon.language.meta.declaration.TypeParameter, ?> typeArguments){
    StringBuffer string = new StringBuffer();
    string.append(declaration.getName());
    if(declaration instanceof ceylon.language.meta.declaration.GenericDeclaration)
        addTypeArguments(string, (ceylon.language.meta.declaration.GenericDeclaration)declaration, typeArguments);
    java.lang.Object container = declaration.getContainer();
    while(container != null){
        if(container instanceof Package)
            // reached the package: prefix and stop
            return ((Package)container).getName() + "::" + string;
        StringBuffer string2 = new StringBuffer(((NestableDeclaration)container).getName());
        if(container instanceof ceylon.language.meta.declaration.GenericDeclaration)
            addTypeArguments(string2, (ceylon.language.meta.declaration.GenericDeclaration)container, typeArguments);
        string2.append(".");
        string.insert(0, string2.toString());
        container = ((NestableDeclaration)container).getContainer();
    }
    return string.toString();
}

/**
 * Appends "<A,B,…>" for a generic declaration, looking each type parameter up in
 * {@code typeArguments}; missing entries render as "##Missing##".
 */
private static void addTypeArguments(StringBuffer string, ceylon.language.meta.declaration.GenericDeclaration declaration,
        ceylon.language.Map<? extends ceylon.language.meta.declaration.TypeParameter, ?> typeArguments) {
    if(!declaration.getTypeParameterDeclarations().getEmpty()){
        string.append("<");
        Iterator<?> iterator = declaration.getTypeParameterDeclarations().iterator();
        Object it;
        boolean once = true;
        while((it = iterator.next()) != finished_.get_()){
            if(once)
                once = false;
            else
                string.append(",");
            ceylon.language.meta.declaration.TypeParameter tpDecl = (ceylon.language.meta.declaration.TypeParameter) it;
            Object val = typeArguments != null ? typeArguments.get(tpDecl) : null;
            string.append(val != null ?
val : "##Missing##");
        }
        string.append(">");
    }
}

/**
 * Renders a qualified name for an applied model member, prefixing either its
 * package ("pkg::"), its class/interface container ("Outer.") or, for other
 * container types, an angle-bracketed form ("<container>.").
 */
public static String toTypeString(ceylon.language.meta.model.Model model){
    StringBuffer string = new StringBuffer();
    ceylon.language.meta.model.Type<?> container = model.getContainer();
    if(container == null){
        string.append(model.getDeclaration().getContainingPackage().getName()).append("::");
    }else if(container instanceof ceylon.language.meta.model.ClassOrInterface<?>){
        string.append(container.toString()).append(".");
    }else{
        string.append("<").append(container.toString()).append(">.");
    }
    string.append(model.getDeclaration().getName());
    if(model instanceof ceylon.language.meta.model.Generic)
        addTypeArguments(string, (ceylon.language.meta.declaration.GenericDeclaration) model.getDeclaration(),
                ((ceylon.language.meta.model.Generic)model).getTypeArguments());
    return string.toString();
}

/**
 * Validates a type-argument list against a declaration's type parameters:
 * arity (respecting defaulted parameters), upper-bound conformance and
 * enumerated ("of") constraints. Throws TypeApplicationException on violation.
 */
public static void checkTypeArguments(ProducedType qualifyingType, Declaration declaration, List<ProducedType> typeArguments) {
    if(declaration instanceof com.redhat.ceylon.compiler.typechecker.model.Generic){
        List<com.redhat.ceylon.compiler.typechecker.model.TypeParameter> typeParameters = ((com.redhat.ceylon.compiler.typechecker.model.Generic) declaration).getTypeParameters();
        if(typeParameters.size() < typeArguments.size())
            throw new TypeApplicationException("Too many type arguments provided: "+typeArguments.size()+", but only accepts "+typeParameters.size());
        // minimum arity = number of non-defaulted type parameters
        int min = 0;
        for (TypeParameter tp: typeParameters) {
            if (!tp.isDefaulted()) min++;
        }
        if(typeArguments.size() < min){
            String requires = (min == typeParameters.size()) ? "exactly" : "at least";
            throw new TypeApplicationException("Not enough type arguments provided: "+typeArguments.size()+", but requires "+requires+" "+min);
        }
        for(int i=0;i<typeArguments.size();i++){
            ProducedType typeArgument = typeArguments.get(i);
            com.redhat.ceylon.compiler.typechecker.model.TypeParameter typeParameter = typeParameters.get(i);
            for (ProducedType st: typeParameter.getSatisfiedTypes()) {
                // substitute the actual arguments into the bound before checking
                ProducedType sts = st.getProducedType(qualifyingType, declaration, typeArguments);
                if (!typeArgument.isSubtypeOf(sts)) {
                    throw new TypeApplicationException("Type argument "+i+": "+typeArgument.getProducedTypeQualifiedName()
                            +" does not conform to upper bound constraint: "+sts.getProducedTypeQualifiedName()
                            +" of type parameter "+typeParameter.getQualifiedNameString());
                }
            }
            if(!ExpressionVisitor.argumentSatisfiesEnumeratedConstraint(qualifyingType, declaration, typeArguments, typeArgument, typeParameter)){
                throw new TypeApplicationException("Type argument "+i+": "+typeArgument.getProducedTypeQualifiedName()
                        +" does not conform to enumerated constraints "
                        +" of type parameter "+typeParameter.getQualifiedNameString());
            }
        }
    }else{
        if(!typeArguments.isEmpty())
            throw new TypeApplicationException("Declaration does not accept type arguments");
    }
}

/**
 * True when the runtime type of {@code instance} is a subtype of {@code producedType}.
 */
public static boolean isTypeOf(ProducedType producedType, Object instance) {
    ProducedType instanceType = Metamodel.getProducedType(instance);
    return instanceType.isSubtypeOf(producedType);
}

/**
 * True when {@code a} is a supertype of the model behind {@code type}.
 */
public static boolean isSuperTypeOf(ProducedType a, ceylon.language.meta.model.Type<? extends Object> type) {
    ProducedType b = Metamodel.getModel(type);
    return a.isSupertypeOf(b);
}

/**
 * True when {@code a} is a subtype of the model behind {@code type}.
 */
public static boolean isSubTypeOf(ProducedType a, ceylon.language.meta.model.Type<? extends Object> type) {
    ProducedType b = Metamodel.getModel(type);
    return a.isSubtypeOf(b);
}

/**
 * True when {@code a} is exactly the model behind {@code type}.
 */
public static boolean isExactly(ProducedType a, ceylon.language.meta.model.Type<?
extends Object> type) {
    ProducedType b = Metamodel.getModel(type);
    return a.isExactly(b);
}

/**
 * Verifies that the declared type argument of an applied declaration is
 * compatible (per {@code variance}) with the reified type the caller supplied;
 * on mismatch throws IncompatibleTypeException with a hint to pass the type
 * argument explicitly. {@code className} is a template where "$1" stands for
 * the type-argument position.
 */
public static void checkReifiedTypeArgument(String methodName, String className, Variance variance, ProducedType appliedType, TypeDescriptor $reifiedType) {
    ProducedType expectedReifiedType = Metamodel.getProducedType($reifiedType);
    boolean check = checkReifiedTypeArgument(variance, appliedType, expectedReifiedType);
    if(!check){
        String appliedTypeString = appliedType.getProducedTypeName();
        String expectedReifiedTypeString = expectedReifiedType.getProducedTypeName();
        String appliedString = className.replace("$1", appliedTypeString);
        String expectedString = className.replace("$1", expectedReifiedTypeString);
        throw new IncompatibleTypeException("Incompatible type: actual type of applied declaration is "+appliedString
                +" is not compatible with expected type: "+expectedString+". Try passing the type argument explicitly with: "
                +methodName+"<"+appliedTypeString+">()");
    }
}

/**
 * Two-type-argument variant of
 * {@link #checkReifiedTypeArgument(String, String, Variance, ProducedType, TypeDescriptor)};
 * "$1"/"$2" in {@code className} mark the two positions.
 */
public static void checkReifiedTypeArgument(String methodName, String className,
        Variance variance1, ProducedType appliedType1, TypeDescriptor $reifiedType1,
        Variance variance2, ProducedType appliedType2, TypeDescriptor $reifiedType2) {
    ProducedType expectedReifiedType1 = Metamodel.getProducedType($reifiedType1);
    ProducedType expectedReifiedType2 = Metamodel.getProducedType($reifiedType2);
    boolean check1 = checkReifiedTypeArgument(variance1, appliedType1, expectedReifiedType1);
    boolean check2 = checkReifiedTypeArgument(variance2, appliedType2, expectedReifiedType2);
    if(!check1 || !check2){
        String appliedTypeString1 = appliedType1.getProducedTypeName();
        String expectedReifiedTypeString1 = expectedReifiedType1.getProducedTypeName();
        String appliedTypeString2 = appliedType2.getProducedTypeName();
        String expectedReifiedTypeString2 = expectedReifiedType2.getProducedTypeName();
        String appliedString = className.replace("$1", appliedTypeString1).replace("$2", appliedTypeString2);
        String expectedString = className.replace("$1", expectedReifiedTypeString1).replace("$2", expectedReifiedTypeString2);
        throw new IncompatibleTypeException("Incompatible type: actual type of applied declaration is "+appliedString
                +" is not compatible with expected type: "+expectedString+". Try passing the type argument explicitly with: "
                +methodName+"<"+appliedTypeString1+","+appliedTypeString2+">()");
    }
}

/**
 * Three-type-argument variant; "$1"/"$2"/"$3" in {@code className} mark the positions.
 */
public static void checkReifiedTypeArgument(String methodName, String className,
        Variance variance1, ProducedType appliedType1, TypeDescriptor $reifiedType1,
        Variance variance2, ProducedType appliedType2, TypeDescriptor $reifiedType2,
        Variance variance3, ProducedType appliedType3, TypeDescriptor $reifiedType3) {
    ProducedType expectedReifiedType1 = Metamodel.getProducedType($reifiedType1);
    ProducedType expectedReifiedType2 = Metamodel.getProducedType($reifiedType2);
    ProducedType expectedReifiedType3 = Metamodel.getProducedType($reifiedType3);
    boolean check1 = checkReifiedTypeArgument(variance1, appliedType1, expectedReifiedType1);
    boolean check2 = checkReifiedTypeArgument(variance2, appliedType2, expectedReifiedType2);
    boolean check3 = checkReifiedTypeArgument(variance3, appliedType3, expectedReifiedType3);
    if(!check1 || !check2 || !check3){
        String appliedTypeString1 = appliedType1.getProducedTypeName();
        String expectedReifiedTypeString1 = expectedReifiedType1.getProducedTypeName();
        String appliedTypeString2 = appliedType2.getProducedTypeName();
        String expectedReifiedTypeString2 = expectedReifiedType2.getProducedTypeName();
        String appliedTypeString3 = appliedType3.getProducedTypeName();
        String expectedReifiedTypeString3 = expectedReifiedType3.getProducedTypeName();
        String appliedString = className.replace("$1", appliedTypeString1).replace("$2", appliedTypeString2).replace("$3", appliedTypeString3);
        String expectedString = className.replace("$1", expectedReifiedTypeString1).replace("$2", expectedReifiedTypeString2).replace("$3", expectedReifiedTypeString3);
        throw new
IncompatibleTypeException("Incompatible type: actual type of applied declaration is "+appliedString +" is not compatible with expected type: "+expectedString+". Try passing the type argument explicitly with: " +methodName+"<"+appliedTypeString1+","+appliedTypeString2+","+appliedTypeString3+">()"); } } private static boolean checkReifiedTypeArgument(Variance variance, ProducedType appliedType, ProducedType expectedReifiedType) { switch(variance){ case IN: return appliedType.isSupertypeOf(expectedReifiedType); case OUT: return appliedType.isSubtypeOf(expectedReifiedType); case NONE: return appliedType.isExactly(expectedReifiedType); default: throw new RuntimeException("Invalid variance: "+variance); } } public static void checkQualifyingType(ProducedType qualifyingType, Declaration declaration) { Scope container = declaration.getContainer(); if(container instanceof TypeDeclaration == false) throw new IncompatibleTypeException("Declaration container is not a type: "+container); TypeDeclaration typeDecl = (TypeDeclaration) container; ProducedType supertype = qualifyingType.getSupertype(typeDecl); if(supertype == null) throw new IncompatibleTypeException("Invalid container type: "+qualifyingType+" is not a subtype of "+typeDecl); } public static <Return> Return apply(Callable<? extends Return> function, Sequential<?> arguments, List<ProducedType> parameterProducedTypes, int firstDefaulted, int variadicIndex){ int argumentCount = (int) arguments.getSize(); int parameters = parameterProducedTypes.size(); // check minimum if(firstDefaulted == -1){ if(argumentCount < parameters) throw new InvocationException("Not enough arguments to function. Expected "+parameters+" but got only "+argumentCount); }else if(argumentCount < firstDefaulted) throw new InvocationException("Not enough arguments to function. 
Expected at least "+firstDefaulted+" but got only "+argumentCount); // check maximum if(variadicIndex == -1){ if(argumentCount > parameters) throw new InvocationException("To many arguments to function. Expected at most "+parameters+" but got "+argumentCount); }// if we're variadic we accept any number // now check their types Iterator<?> it = arguments.iterator(); Object arg; int i = 0; ProducedType variadicElement = null; if(variadicIndex != -1) // it must be a Sequential<T> variadicElement = parameterProducedTypes.get(variadicIndex).getTypeArgumentList().get(0); while((arg = it.next()) != finished_.get_()){ ProducedType parameterType = variadicIndex == -1 || i < variadicIndex ? // normal param parameterProducedTypes.get(i) // variadic param : variadicElement; ProducedType argumentType = Metamodel.getProducedType(arg); if(!argumentType.isSubtypeOf(parameterType)) throw new IncompatibleTypeException("Invalid argument "+i+", expected type "+parameterType+" but got "+argumentType); i++; } // they are all good, let's call it return Util.apply(function, arguments); } public static ceylon.language.meta.model.Model bind(ceylon.language.meta.model.Member<?,?> member, ProducedType containerType, Object container){ if(container == null) throw new IncompatibleTypeException("Invalid container "+container+", expected type "+containerType+" but got ceylon.language::Null"); ProducedType argumentType = Metamodel.getProducedType(container); if(!argumentType.isSubtypeOf(containerType)) throw new IncompatibleTypeException("Invalid container "+container+", expected type "+containerType+" but got "+argumentType); return member.$call$(container); } }
add todo
runtime/com/redhat/ceylon/compiler/java/runtime/metamodel/Metamodel.java
add todo
<ide><path>untime/com/redhat/ceylon/compiler/java/runtime/metamodel/Metamodel.java <ide> if(klass.isArray()) <ide> return getJavaArrayTypeDescriptor(klass); <ide> // make sure java.lang.Object doesn't leak in the ceylon metamodel <add> // TODO: what about Throwable/j.l.Exception/RuntimeException? <ide> if(klass == Object.class) <ide> return ceylon.language.Object.$TypeDescriptor$; <ide> return TypeDescriptor.klass(klass);
Java
apache-2.0
7fc2051d17682d3590a39d9906ca35ec2edccac8
0
Natio/Places,Natio/Places,Natio/Places
package com.gcw.sapienza.places.fragments; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.location.Location; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.v4.content.LocalBroadcastManager; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.animation.AlphaAnimation; import android.widget.RelativeLayout; import com.gcw.sapienza.places.R; import com.gcw.sapienza.places.activities.MainActivity; import com.gcw.sapienza.places.layouts.MSwipeRefreshLayout; import com.gcw.sapienza.places.models.Flag; import com.gcw.sapienza.places.services.LocationService; import com.gcw.sapienza.places.utils.FlagsStorage; import com.gcw.sapienza.places.utils.Utils; import com.google.android.gms.maps.CameraUpdateFactory; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.OnMapReadyCallback; import com.google.android.gms.maps.SupportMapFragment; import com.google.android.gms.maps.model.BitmapDescriptorFactory; import com.google.android.gms.maps.model.LatLng; import com.google.android.gms.maps.model.LatLngBounds; import com.google.android.gms.maps.model.Marker; import com.google.android.gms.maps.model.MarkerOptions; import com.parse.ParseGeoPoint; import java.util.ArrayList; import java.util.List; /** * Created by snowblack on 3/20/15. 
*/ public class BagFragment extends Fragment implements OnMapReadyCallback, SwipeRefreshLayout.OnRefreshListener, GoogleMap.OnMarkerClickListener{ private static final String TAG = "BagFragment"; private GoogleMap gMap; private BroadcastReceiver receiver; private RelativeLayout progressBarHolder; private MSwipeRefreshLayout srl; private FragmentActivity myContext; private List<Marker> markers; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); this.receiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { /** * eventually, we might want to have different behaviors * for different events (whether my flags have been found or not) */ switch (intent.getAction()) { case LocationService.FOUND_BAG_FLAGS_NOTIFICATION: Log.d(TAG, "My bag flags found"); // MyFlagsFragment.this.dismissProgressBar(); break; case LocationService.FOUND_NO_BAG_FLAGS_NOTIFICATION: Log.d(TAG, "No my bag flags found"); // MyFlagsFragment.this.dismissProgressBar(); break; default: } updateMarkersOnMap(); } }; LocalBroadcastManager.getInstance(getActivity()).registerReceiver(this.receiver, new IntentFilter(LocationService.FOUND_BAG_FLAGS_NOTIFICATION)); LocalBroadcastManager.getInstance(getActivity()).registerReceiver(this.receiver, new IntentFilter(LocationService.FOUND_NO_BAG_FLAGS_NOTIFICATION)); } @Override public void onAttach(Activity activity) { this.myContext = (FragmentActivity) activity; super.onAttach(activity); } @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { super.onCreateView(inflater, container, savedInstanceState); View view = inflater.inflate(R.layout.my_flags_layout, container, false); this.progressBarHolder = (RelativeLayout) view.findViewById(R.id.frame_layout); srl = (MSwipeRefreshLayout) view.findViewById(R.id.my_swipe_refresh); srl.setOnRefreshListener(this); // this.homeHolder.setVisibility(View.INVISIBLE); 
// this.fragHolder.setVisibility(View.INVISIBLE); // this.srl.setVisibility(View.INVISIBLE); srl.setOnChildScrollUpListener(new MSwipeRefreshLayout.OnChildScrollUpListener() { @Override public boolean canChildScrollUp() { List<Fragment> frags = myContext.getSupportFragmentManager().getFragments(); if (frags.size() < 1) return false; RecyclerView rv = null; for (int i = 0; i < frags.size(); i++) { if (frags.get(i) instanceof MyBagFlagsListFragment) { rv = ((MyBagFlagsListFragment) frags.get(i)).getRV(); break; } } if (rv == null) return false; RecyclerView.LayoutManager layoutManager = rv.getLayoutManager(); int position = ((LinearLayoutManager) layoutManager).findFirstCompletelyVisibleItemPosition(); // Log.d(TAG, "First completely visible item position: " + position); return position != 0 && rv.getAdapter().getItemCount() != 0; } }); // showProgressBar(); Fragment fragment = new MyBagFlagsListFragment(); myContext.getSupportFragmentManager().beginTransaction().replace(R.id.my_swipe_refresh, fragment).commit(); SupportMapFragment mapFragment = new SupportMapFragment(); myContext.getSupportFragmentManager().beginTransaction().replace(R.id.my_map_holder, mapFragment).commit(); mapFragment.getMapAsync(BagFragment.this); return view; } private void showProgressBar() { AlphaAnimation inAnim = new AlphaAnimation(0, 1); inAnim.setDuration(Utils.ANIMATION_DURATION); progressBarHolder.setAnimation(inAnim); progressBarHolder.setVisibility(View.VISIBLE); } private void dismissProgressBar() { AlphaAnimation outAnim = new AlphaAnimation(1, 0); outAnim.setDuration(Utils.ANIMATION_DURATION); progressBarHolder.setAnimation(outAnim); progressBarHolder.setVisibility(View.GONE); // MyFlagsFragment.this.homeHolder.setVisibility(View.VISIBLE); // MyFlagsFragment.this.fragHolder.setVisibility(View.VISIBLE); // MyFlagsFragment.this.srl.setVisibility(View.VISIBLE); } @Override public void onDestroy() { super.onDestroy(); 
LocalBroadcastManager.getInstance(getActivity()).unregisterReceiver(this.receiver); } @Override public void onMapReady(GoogleMap googleMap) { this.gMap = googleMap; this.gMap.getUiSettings().setScrollGesturesEnabled(true); this.gMap.getUiSettings().setZoomGesturesEnabled(true); this.gMap.setOnMarkerClickListener(this); this.gMap.setMyLocationEnabled(true); ((MainActivity) getActivity()).refresh(Utils.BAG_FLAGS_CODE); } @Override public boolean onMarkerClick(Marker selectedMarker) { int index = Integer.parseInt(selectedMarker.getSnippet()); List<Fragment> frags = getActivity().getSupportFragmentManager().getFragments(); if (frags.size() < 1) return false; for (int i = 0; i < frags.size(); i++) { if (frags.get(i) instanceof MyBagFlagsListFragment) { MyBagFlagsListFragment flf = ((MyBagFlagsListFragment) frags.get(i)); flf.getRV().smoothScrollToPosition(index); // TODO item highlight on flag clicked on map? break; } } if(selectedMarker.getAlpha() == Utils.FLAG_ALPHA_SELECTED){ for (Marker marker : this.markers) { marker.setAlpha(Utils.FLAG_ALPHA_NORMAL); } }else { for (Marker marker : this.markers) { marker.setAlpha(Utils.FLAG_ALPHA_UNSELECTED); } selectedMarker.setAlpha(Utils.FLAG_ALPHA_SELECTED); } // by returning false we can show text on flag in the map // return false; return true; } private void updateMarkersOnMap() { this.markers = new ArrayList<>(); List<Flag> flags = FlagsStorage.getSharedStorage().getOrderedFlags(getActivity(), FlagsStorage.Type.BAG ); if (flags != null && this.gMap != null) { this.gMap.clear(); //zooms around all the Flags LatLngBounds.Builder builder = new LatLngBounds.Builder(); int index = 0; for (Flag f : flags) { // Log.d(TAG, "===FLAG DATA==="); // Log.d(TAG, f.getObjectId()); // Log.d(TAG, f.getText()); // Log.d(TAG, "==============="); ParseGeoPoint location = f.getLocation(); String text = f.getText(); LatLng latLng = new LatLng(location.getLatitude(), location.getLongitude()); builder.include(latLng); //25% size original icon int 
marker_id = Utils.getIconForCategory(f.getCategory(), getActivity()); Bitmap marker = BitmapFactory.decodeResource(getResources(), marker_id); Bitmap halfSizeMarker = Bitmap.createScaledBitmap (marker, (int) (marker.getWidth() * Utils.FLAG_SCALE_NORMAL), (int) (marker.getHeight() * Utils.FLAG_SCALE_NORMAL), false); Marker newMarker = this.gMap.addMarker(new MarkerOptions() .position(latLng) .title(text) .snippet( String.valueOf(index) ) .icon(BitmapDescriptorFactory.fromBitmap(halfSizeMarker)) // .icon(BitmapDescriptorFactory.fromResource(getIconForCategory(f.getCategory()))) //.icon(BitmapDescriptorFactory.defaultMarker(getCategoryColor(f.getCategory()))) .alpha(Utils.FLAG_ALPHA_NORMAL)); this.markers.add(newMarker); index++; } if (flags.size() > 0) { LatLngBounds bounds = builder.build(); // this.gMap.moveCamera(CameraUpdateFactory.newLatLngBounds(bounds, Utils.MAP_BOUNDS)); this.gMap.moveCamera(CameraUpdateFactory.newLatLngBounds(bounds, Utils.MAP_BOUNDS)); } else { Location currentLocation = gMap.getMyLocation(); if (currentLocation != null) { LatLng currentLocationLatLng = new LatLng(currentLocation.getLatitude(), currentLocation.getLongitude()); this.gMap.animateCamera(CameraUpdateFactory .newLatLngZoom(currentLocationLatLng, Utils.ZOOM_LVL)); } } } } @Override public void onRefresh() { refresh(); srl.setRefreshing(false); } protected void refresh() { ((MainActivity) getActivity()).refresh(Utils.BAG_FLAGS_CODE); } }
app/src/main/java/com/gcw/sapienza/places/fragments/BagFragment.java
package com.gcw.sapienza.places.fragments; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.location.Location; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.v4.content.LocalBroadcastManager; import android.support.v4.widget.DrawerLayout; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.animation.AlphaAnimation; import android.widget.FrameLayout; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import com.gcw.sapienza.places.R; import com.gcw.sapienza.places.activities.MainActivity; import com.gcw.sapienza.places.layouts.MSwipeRefreshLayout; import com.gcw.sapienza.places.models.Flag; import com.gcw.sapienza.places.services.LocationService; import com.gcw.sapienza.places.utils.FlagsStorage; import com.gcw.sapienza.places.utils.Utils; import com.google.android.gms.maps.CameraUpdateFactory; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.OnMapReadyCallback; import com.google.android.gms.maps.SupportMapFragment; import com.google.android.gms.maps.model.BitmapDescriptorFactory; import com.google.android.gms.maps.model.LatLng; import com.google.android.gms.maps.model.LatLngBounds; import com.google.android.gms.maps.model.Marker; import com.google.android.gms.maps.model.MarkerOptions; import com.parse.ParseGeoPoint; import java.util.ArrayList; import java.util.List; /** * Created by snowblack on 3/20/15. 
*/ public class BagFragment extends Fragment implements OnMapReadyCallback, SwipeRefreshLayout.OnRefreshListener, GoogleMap.OnMarkerClickListener{ private static final String TAG = "BagFragment"; private View view; private GoogleMap gMap; private BroadcastReceiver receiver; private RelativeLayout progressBarHolder; private MSwipeRefreshLayout srl; private FragmentActivity myContext; private List<Flag> flags; private TextView progressTextView; private DrawerLayout drawerLayout; private LinearLayout homeHolder; private FrameLayout fragHolder; private List<Marker> markers; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); this.receiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { /** * eventually, we might want to have different behaviors * for different events (whether my flags have been found or not) */ switch (intent.getAction()) { case LocationService.FOUND_BAG_FLAGS_NOTIFICATION: Log.d(TAG, "My bag flags found"); // MyFlagsFragment.this.dismissProgressBar(); break; case LocationService.FOUND_NO_BAG_FLAGS_NOTIFICATION: Log.d(TAG, "No my bag flags found"); // MyFlagsFragment.this.dismissProgressBar(); break; default: } updateMarkersOnMap(); } }; LocalBroadcastManager.getInstance(getActivity()).registerReceiver(this.receiver, new IntentFilter(LocationService.FOUND_BAG_FLAGS_NOTIFICATION)); LocalBroadcastManager.getInstance(getActivity()).registerReceiver(this.receiver, new IntentFilter(LocationService.FOUND_NO_BAG_FLAGS_NOTIFICATION)); } @Override public void onAttach(Activity activity) { this.myContext = (FragmentActivity) activity; super.onAttach(activity); } @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { super.onCreateView(inflater, container, savedInstanceState); view = inflater.inflate(R.layout.my_flags_layout, container, false); this.progressBarHolder = (RelativeLayout) 
view.findViewById(R.id.frame_layout); this.progressTextView = (TextView) view.findViewById(R.id.share_progress_text_view); this.homeHolder = (LinearLayout) view.findViewById(R.id.my_home_container); this.fragHolder = (FrameLayout) view.findViewById(R.id.my_frag_container); srl = (MSwipeRefreshLayout) view.findViewById(R.id.my_swipe_refresh); srl.setOnRefreshListener(this); // this.homeHolder.setVisibility(View.INVISIBLE); // this.fragHolder.setVisibility(View.INVISIBLE); // this.srl.setVisibility(View.INVISIBLE); srl.setOnChildScrollUpListener(new MSwipeRefreshLayout.OnChildScrollUpListener() { @Override public boolean canChildScrollUp() { List<Fragment> frags = myContext.getSupportFragmentManager().getFragments(); if (frags.size() < 1) return false; RecyclerView rv = null; for (int i = 0; i < frags.size(); i++) { if (frags.get(i) instanceof MyBagFlagsListFragment) { rv = ((MyBagFlagsListFragment) frags.get(i)).getRV(); break; } } if (rv == null) return false; RecyclerView.LayoutManager layoutManager = rv.getLayoutManager(); int position = ((LinearLayoutManager) layoutManager).findFirstCompletelyVisibleItemPosition(); // Log.d(TAG, "First completely visible item position: " + position); return position != 0 && rv.getAdapter().getItemCount() != 0; } }); // showProgressBar(); Fragment fragment = new MyBagFlagsListFragment(); myContext.getSupportFragmentManager().beginTransaction().replace(R.id.my_swipe_refresh, fragment).commit(); SupportMapFragment mapFragment = new SupportMapFragment(); myContext.getSupportFragmentManager().beginTransaction().replace(R.id.my_map_holder, mapFragment).commit(); mapFragment.getMapAsync(BagFragment.this); return view; } private void showProgressBar() { AlphaAnimation inAnim = new AlphaAnimation(0, 1); inAnim.setDuration(Utils.ANIMATION_DURATION); progressBarHolder.setAnimation(inAnim); progressBarHolder.setVisibility(View.VISIBLE); } private void dismissProgressBar() { AlphaAnimation outAnim = new AlphaAnimation(1, 0); 
outAnim.setDuration(Utils.ANIMATION_DURATION); progressBarHolder.setAnimation(outAnim); progressBarHolder.setVisibility(View.GONE); // MyFlagsFragment.this.homeHolder.setVisibility(View.VISIBLE); // MyFlagsFragment.this.fragHolder.setVisibility(View.VISIBLE); // MyFlagsFragment.this.srl.setVisibility(View.VISIBLE); } @Override public void onDestroy() { super.onDestroy(); LocalBroadcastManager.getInstance(getActivity()).unregisterReceiver(this.receiver); } @Override public void onMapReady(GoogleMap googleMap) { this.gMap = googleMap; this.gMap.getUiSettings().setScrollGesturesEnabled(true); this.gMap.getUiSettings().setZoomGesturesEnabled(true); this.gMap.setOnMarkerClickListener(this); this.gMap.setMyLocationEnabled(true); ((MainActivity) getActivity()).refresh(Utils.BAG_FLAGS_CODE); } @Override public boolean onMarkerClick(Marker selectedMarker) { int index = Integer.parseInt(selectedMarker.getSnippet()); List<Fragment> frags = getActivity().getSupportFragmentManager().getFragments(); if (frags.size() < 1) return false; for (int i = 0; i < frags.size(); i++) { if (frags.get(i) instanceof MyBagFlagsListFragment) { MyBagFlagsListFragment flf = ((MyBagFlagsListFragment) frags.get(i)); flf.getRV().smoothScrollToPosition(index); // TODO item highlight on flag clicked on map? 
break; } } if(selectedMarker.getAlpha() == Utils.FLAG_ALPHA_SELECTED){ for (Marker marker : this.markers) { marker.setAlpha(Utils.FLAG_ALPHA_NORMAL); } }else { for (Marker marker : this.markers) { marker.setAlpha(Utils.FLAG_ALPHA_UNSELECTED); } selectedMarker.setAlpha(Utils.FLAG_ALPHA_SELECTED); } // by returning false we can show text on flag in the map // return false; return true; } private void updateMarkersOnMap() { this.markers = new ArrayList<>(); this.flags = FlagsStorage.getSharedStorage().getOrderedFlags(getActivity(), FlagsStorage.Type.BAG ); if (this.flags != null && this.gMap != null) { this.gMap.clear(); //zooms around all the Flags LatLngBounds.Builder builder = new LatLngBounds.Builder(); int index = 0; for (Flag f : this.flags) { // Log.d(TAG, "===FLAG DATA==="); // Log.d(TAG, f.getObjectId()); // Log.d(TAG, f.getText()); // Log.d(TAG, "==============="); ParseGeoPoint location = f.getLocation(); String text = f.getText(); LatLng latLng = new LatLng(location.getLatitude(), location.getLongitude()); builder.include(latLng); //25% size original icon int marker_id = Utils.getIconForCategory(f.getCategory(), getActivity()); Bitmap marker = BitmapFactory.decodeResource(getResources(), marker_id); Bitmap halfSizeMarker = Bitmap.createScaledBitmap (marker, (int) (marker.getWidth() * Utils.FLAG_SCALE_NORMAL), (int) (marker.getHeight() * Utils.FLAG_SCALE_NORMAL), false); Marker newMarker = this.gMap.addMarker(new MarkerOptions() .position(latLng) .title(text) .snippet(index + "") .icon(BitmapDescriptorFactory.fromBitmap(halfSizeMarker)) // .icon(BitmapDescriptorFactory.fromResource(getIconForCategory(f.getCategory()))) //.icon(BitmapDescriptorFactory.defaultMarker(getCategoryColor(f.getCategory()))) .alpha(Utils.FLAG_ALPHA_NORMAL)); this.markers.add(newMarker); index++; } if (this.flags.size() > 0) { LatLngBounds bounds = builder.build(); // this.gMap.moveCamera(CameraUpdateFactory.newLatLngBounds(bounds, Utils.MAP_BOUNDS)); 
this.gMap.moveCamera(CameraUpdateFactory.newLatLngBounds(bounds, Utils.MAP_BOUNDS)); } else { Location currentLocation = gMap.getMyLocation(); if (currentLocation != null) { LatLng currentLocationLatLng = new LatLng(currentLocation.getLatitude(), currentLocation.getLongitude()); this.gMap.animateCamera(CameraUpdateFactory .newLatLngZoom(currentLocationLatLng, Utils.ZOOM_LVL)); } } } } @Override public void onRefresh() { refresh(); srl.setRefreshing(false); } protected void refresh() { ((MainActivity) getActivity()).refresh(Utils.BAG_FLAGS_CODE); } }
small refactoring
app/src/main/java/com/gcw/sapienza/places/fragments/BagFragment.java
small refactoring
<ide><path>pp/src/main/java/com/gcw/sapienza/places/fragments/BagFragment.java <ide> import android.support.v4.app.Fragment; <ide> import android.support.v4.app.FragmentActivity; <ide> import android.support.v4.content.LocalBroadcastManager; <del>import android.support.v4.widget.DrawerLayout; <ide> import android.support.v4.widget.SwipeRefreshLayout; <ide> import android.support.v7.widget.LinearLayoutManager; <ide> import android.support.v7.widget.RecyclerView; <ide> import android.view.View; <ide> import android.view.ViewGroup; <ide> import android.view.animation.AlphaAnimation; <del>import android.widget.FrameLayout; <del>import android.widget.LinearLayout; <ide> import android.widget.RelativeLayout; <del>import android.widget.TextView; <ide> <ide> import com.gcw.sapienza.places.R; <ide> import com.gcw.sapienza.places.activities.MainActivity; <ide> GoogleMap.OnMarkerClickListener{ <ide> private static final String TAG = "BagFragment"; <ide> <del> private View view; <ide> private GoogleMap gMap; <ide> private BroadcastReceiver receiver; <ide> private RelativeLayout progressBarHolder; <ide> private MSwipeRefreshLayout srl; <ide> private FragmentActivity myContext; <del> private List<Flag> flags; <del> private TextView progressTextView; <del> private DrawerLayout drawerLayout; <del> private LinearLayout homeHolder; <del> private FrameLayout fragHolder; <ide> <ide> private List<Marker> markers; <ide> <ide> public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { <ide> super.onCreateView(inflater, container, savedInstanceState); <ide> <del> view = inflater.inflate(R.layout.my_flags_layout, container, false); <add> View view = inflater.inflate(R.layout.my_flags_layout, container, false); <ide> <ide> this.progressBarHolder = (RelativeLayout) view.findViewById(R.id.frame_layout); <del> this.progressTextView = (TextView) view.findViewById(R.id.share_progress_text_view); <del> <del> this.homeHolder = 
(LinearLayout) view.findViewById(R.id.my_home_container); <del> this.fragHolder = (FrameLayout) view.findViewById(R.id.my_frag_container); <ide> <ide> srl = (MSwipeRefreshLayout) view.findViewById(R.id.my_swipe_refresh); <ide> srl.setOnRefreshListener(this); <ide> <ide> this.markers = new ArrayList<>(); <ide> <del> this.flags = FlagsStorage.getSharedStorage().getOrderedFlags(getActivity(), FlagsStorage.Type.BAG ); <del> <del> if (this.flags != null && this.gMap != null) { <add> List<Flag> flags = FlagsStorage.getSharedStorage().getOrderedFlags(getActivity(), FlagsStorage.Type.BAG ); <add> <add> if (flags != null && this.gMap != null) { <ide> this.gMap.clear(); <ide> <ide> //zooms around all the Flags <ide> <ide> int index = 0; <ide> <del> for (Flag f : this.flags) { <add> for (Flag f : flags) { <ide> // Log.d(TAG, "===FLAG DATA==="); <ide> // Log.d(TAG, f.getObjectId()); <ide> // Log.d(TAG, f.getText()); <ide> Marker newMarker = this.gMap.addMarker(new MarkerOptions() <ide> .position(latLng) <ide> .title(text) <del> .snippet(index + "") <add> .snippet( String.valueOf(index) ) <ide> .icon(BitmapDescriptorFactory.fromBitmap(halfSizeMarker)) <ide> // .icon(BitmapDescriptorFactory.fromResource(getIconForCategory(f.getCategory()))) <ide> //.icon(BitmapDescriptorFactory.defaultMarker(getCategoryColor(f.getCategory()))) <ide> index++; <ide> } <ide> <del> if (this.flags.size() > 0) { <add> if (flags.size() > 0) { <ide> LatLngBounds bounds = builder.build(); <ide> // this.gMap.moveCamera(CameraUpdateFactory.newLatLngBounds(bounds, Utils.MAP_BOUNDS)); <ide> this.gMap.moveCamera(CameraUpdateFactory.newLatLngBounds(bounds, Utils.MAP_BOUNDS));
Java
apache-2.0
9201b6498b55c9e4d9d2e92fea38ddaad42e6832
0
alexryndin/ambari,arenadata/ambari,alexryndin/ambari,alexryndin/ambari,arenadata/ambari,arenadata/ambari,arenadata/ambari,alexryndin/ambari,alexryndin/ambari,alexryndin/ambari,arenadata/ambari,arenadata/ambari,alexryndin/ambari,alexryndin/ambari,arenadata/ambari,arenadata/ambari,alexryndin/ambari,arenadata/ambari,arenadata/ambari,arenadata/ambari,alexryndin/ambari,alexryndin/ambari
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.controller; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_DRIVER; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_PASSWORD; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_URL; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_USERNAME; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CLIENTS_TO_UPDATE_CONFIGS; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_RETRY_ENABLED; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GROUP_LIST; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MAX_DURATION_OF_RETRIES; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.NOT_MANAGED_HDFS_PATH_LIST; import static 
org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_LIST; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_VERSION; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.USER_LIST; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.VERSION; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.lang.reflect.Type; import java.net.InetAddress; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumMap; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.ClusterNotFoundException; import org.apache.ambari.server.DuplicateResourceException; import org.apache.ambari.server.HostNotFoundException; import org.apache.ambari.server.ObjectNotFoundException; import org.apache.ambari.server.ParentObjectNotFoundException; import org.apache.ambari.server.Role; import org.apache.ambari.server.RoleCommand; import org.apache.ambari.server.ServiceComponentHostNotFoundException; import org.apache.ambari.server.ServiceComponentNotFoundException; 
import org.apache.ambari.server.ServiceNotFoundException; import org.apache.ambari.server.StackAccessException; import org.apache.ambari.server.actionmanager.ActionManager; import org.apache.ambari.server.actionmanager.HostRoleCommand; import org.apache.ambari.server.actionmanager.RequestFactory; import org.apache.ambari.server.actionmanager.Stage; import org.apache.ambari.server.actionmanager.StageFactory; import org.apache.ambari.server.agent.ExecutionCommand; import org.apache.ambari.server.agent.ExecutionCommand.KeyNames; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.configuration.Configuration; import org.apache.ambari.server.configuration.Configuration.DatabaseType; import org.apache.ambari.server.controller.internal.RequestOperationLevel; import org.apache.ambari.server.controller.internal.RequestResourceFilter; import org.apache.ambari.server.controller.internal.RequestStageContainer; import org.apache.ambari.server.controller.internal.URLStreamProvider; import org.apache.ambari.server.controller.internal.WidgetLayoutResourceProvider; import org.apache.ambari.server.controller.internal.WidgetResourceProvider; import org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricCacheProvider; import org.apache.ambari.server.controller.spi.Resource; import org.apache.ambari.server.customactions.ActionDefinition; import org.apache.ambari.server.metadata.ActionMetadata; import org.apache.ambari.server.metadata.RoleCommandOrder; import org.apache.ambari.server.orm.dao.ClusterDAO; import org.apache.ambari.server.orm.dao.ClusterVersionDAO; import org.apache.ambari.server.orm.dao.RepositoryVersionDAO; import org.apache.ambari.server.orm.dao.WidgetDAO; import org.apache.ambari.server.orm.dao.WidgetLayoutDAO; import org.apache.ambari.server.orm.entities.ClusterEntity; import org.apache.ambari.server.orm.entities.ClusterVersionEntity; import org.apache.ambari.server.orm.entities.OperatingSystemEntity; import 
org.apache.ambari.server.orm.entities.RepositoryEntity; import org.apache.ambari.server.orm.entities.RepositoryVersionEntity; import org.apache.ambari.server.orm.entities.WidgetEntity; import org.apache.ambari.server.orm.entities.WidgetLayoutEntity; import org.apache.ambari.server.orm.entities.WidgetLayoutUserWidgetEntity; import org.apache.ambari.server.scheduler.ExecutionScheduleManager; import org.apache.ambari.server.security.authorization.AuthorizationException; import org.apache.ambari.server.security.authorization.AuthorizationHelper; import org.apache.ambari.server.security.authorization.Group; import org.apache.ambari.server.security.authorization.ResourceType; import org.apache.ambari.server.security.authorization.RoleAuthorization; import org.apache.ambari.server.security.authorization.User; import org.apache.ambari.server.security.authorization.Users; import org.apache.ambari.server.security.credential.PrincipalKeyCredential; import org.apache.ambari.server.security.encryption.CredentialStoreService; import org.apache.ambari.server.security.encryption.CredentialStoreType; import org.apache.ambari.server.security.ldap.AmbariLdapDataPopulator; import org.apache.ambari.server.security.ldap.LdapBatchDto; import org.apache.ambari.server.security.ldap.LdapSyncDto; import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException; import org.apache.ambari.server.serveraction.kerberos.KerberosOperationException; import org.apache.ambari.server.stageplanner.RoleGraph; import org.apache.ambari.server.stageplanner.RoleGraphFactory; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.CommandScriptDefinition; import org.apache.ambari.server.state.ComponentInfo; import org.apache.ambari.server.state.Config; import org.apache.ambari.server.state.ConfigFactory; import org.apache.ambari.server.state.ConfigHelper; import org.apache.ambari.server.state.Host; import 
org.apache.ambari.server.state.HostComponentAdminState; import org.apache.ambari.server.state.HostState; import org.apache.ambari.server.state.MaintenanceState; import org.apache.ambari.server.state.OperatingSystemInfo; import org.apache.ambari.server.state.PropertyDependencyInfo; import org.apache.ambari.server.state.PropertyInfo; import org.apache.ambari.server.state.PropertyInfo.PropertyType; import org.apache.ambari.server.state.RepositoryInfo; import org.apache.ambari.server.state.RepositoryVersionState; import org.apache.ambari.server.state.SecurityType; import org.apache.ambari.server.state.Service; import org.apache.ambari.server.state.ServiceComponent; import org.apache.ambari.server.state.ServiceComponentFactory; import org.apache.ambari.server.state.ServiceComponentHost; import org.apache.ambari.server.state.ServiceComponentHostEvent; import org.apache.ambari.server.state.ServiceComponentHostFactory; import org.apache.ambari.server.state.ServiceFactory; import org.apache.ambari.server.state.ServiceInfo; import org.apache.ambari.server.state.ServiceOsSpecific; import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.StackInfo; import org.apache.ambari.server.state.State; import org.apache.ambari.server.state.configgroup.ConfigGroupFactory; import org.apache.ambari.server.state.repository.VersionDefinitionXml; import org.apache.ambari.server.state.scheduler.RequestExecutionFactory; import org.apache.ambari.server.state.stack.RepositoryXml; import org.apache.ambari.server.state.stack.WidgetLayout; import org.apache.ambari.server.state.stack.WidgetLayoutInfo; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStopEvent; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostUpgradeEvent; import 
org.apache.ambari.server.utils.SecretReference; import org.apache.ambari.server.utils.StageUtils; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.math.NumberUtils; import org.apache.http.client.utils.URIBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Singleton; import com.google.inject.persist.Transactional; @Singleton public class AmbariManagementControllerImpl implements AmbariManagementController { private final static Logger LOG = LoggerFactory.getLogger(AmbariManagementControllerImpl.class); /** * Property name of request context. 
*/ private static final String REQUEST_CONTEXT_PROPERTY = "context"; private static final String CLUSTER_PHASE_PROPERTY = "phase"; private static final String CLUSTER_PHASE_INITIAL_INSTALL = "INITIAL_INSTALL"; private static final String CLUSTER_PHASE_INITIAL_START = "INITIAL_START"; private static final String BASE_LOG_DIR = "/tmp/ambari"; private final Clusters clusters; private final ActionManager actionManager; private final Injector injector; private final Gson gson; @Inject private ServiceFactory serviceFactory; @Inject private ServiceComponentFactory serviceComponentFactory; @Inject private ServiceComponentHostFactory serviceComponentHostFactory; @Inject private ConfigFactory configFactory; @Inject private StageFactory stageFactory; @Inject private RequestFactory requestFactory; @Inject private ActionMetadata actionMetadata; @Inject private AmbariMetaInfo ambariMetaInfo; @Inject private Users users; @Inject private HostsMap hostsMap; @Inject private Configuration configs; @Inject private AbstractRootServiceResponseFactory rootServiceResponseFactory; @Inject private RoleGraphFactory roleGraphFactory; @Inject private ConfigGroupFactory configGroupFactory; @Inject private ConfigHelper configHelper; @Inject private RequestExecutionFactory requestExecutionFactory; @Inject private ExecutionScheduleManager executionScheduleManager; @Inject private AmbariLdapDataPopulator ldapDataPopulator; @Inject private RepositoryVersionDAO repositoryVersionDAO; @Inject private WidgetDAO widgetDAO; @Inject private WidgetLayoutDAO widgetLayoutDAO; @Inject private ClusterDAO clusterDAO; @Inject private CredentialStoreService credentialStoreService; @Inject private ClusterVersionDAO clusterVersionDAO; private MaintenanceStateHelper maintenanceStateHelper; /** * The KerberosHelper to help setup for enabling for disabling Kerberos */ private KerberosHelper kerberosHelper; final private String masterHostname; final private Integer masterPort; final private String masterProtocol; final 
private static String JDK_RESOURCE_LOCATION = "/resources/"; final private static int REPO_URL_CONNECT_TIMEOUT = 3000; final private static int REPO_URL_READ_TIMEOUT = 2000; final private String jdkResourceUrl; final private String javaHome; final private String jdkName; final private String jceName; final private String ojdbcUrl; final private String serverDB; final private String mysqljdbcUrl; private boolean ldapSyncInProgress; private Cache<ClusterRequest, ClusterResponse> clusterUpdateCache = CacheBuilder.newBuilder().expireAfterWrite(5, TimeUnit.MINUTES).build(); @Inject private AmbariCustomCommandExecutionHelper customCommandExecutionHelper; @Inject private AmbariActionExecutionHelper actionExecutionHelper; @Inject public AmbariManagementControllerImpl(ActionManager actionManager, Clusters clusters, Injector injector) throws Exception { this.clusters = clusters; this.actionManager = actionManager; this.injector = injector; injector.injectMembers(this); gson = injector.getInstance(Gson.class); LOG.info("Initializing the AmbariManagementControllerImpl"); masterHostname = InetAddress.getLocalHost().getCanonicalHostName(); maintenanceStateHelper = injector.getInstance(MaintenanceStateHelper.class); kerberosHelper = injector.getInstance(KerberosHelper.class); if(configs != null) { if (configs.getApiSSLAuthentication()) { masterProtocol = "https"; masterPort = configs.getClientSSLApiPort(); } else { masterProtocol = "http"; masterPort = configs.getClientApiPort(); } jdkResourceUrl = getAmbariServerURI(JDK_RESOURCE_LOCATION); javaHome = configs.getJavaHome(); jdkName = configs.getJDKName(); jceName = configs.getJCEName(); ojdbcUrl = getAmbariServerURI(JDK_RESOURCE_LOCATION + "/" + configs.getOjdbcJarName()); mysqljdbcUrl = getAmbariServerURI(JDK_RESOURCE_LOCATION + "/" + configs.getMySQLJarName()); serverDB = configs.getServerDBName(); } else { masterProtocol = null; masterPort = null; jdkResourceUrl = null; javaHome = null; jdkName = null; jceName = null; ojdbcUrl 
= null;
      mysqljdbcUrl = null;
      serverDB = null;
    }
  }

  /**
   * Builds an absolute URI to a resource served by this Ambari server
   * (scheme/host/port taken from the server's own configuration).
   * Returns {@code null} when the protocol, hostname or port were never
   * initialized (i.e. the injected Configuration was null in the constructor).
   */
  @Override
  public String getAmbariServerURI(String path) {
    if(masterProtocol==null || masterHostname==null || masterPort==null) {
      return null;
    }

    URIBuilder uriBuilder = new URIBuilder();
    uriBuilder.setScheme(masterProtocol);
    uriBuilder.setHost(masterHostname);
    uriBuilder.setPort(masterPort);

    // Split an optional query string off the path; note that only the text
    // after the FIRST '?' is kept as the query (split limit is unbounded but
    // only parts[0]/parts[1] are consumed).
    String[] parts = path.split("\\?");

    if (parts.length > 1) {
      uriBuilder.setPath(parts[0]);
      uriBuilder.setQuery(parts[1]);
    } else {
      uriBuilder.setPath(path);
    }

    return uriBuilder.toString();
  }

  /**
   * Returns a RoleCommandOrder initialized for the given cluster.
   * A fresh instance is obtained from the injector on every call.
   */
  @Override
  public RoleCommandOrder getRoleCommandOrder(Cluster cluster) {
    RoleCommandOrder rco;
    rco = injector.getInstance(RoleCommandOrder.class);
    rco.initialize(cluster);
    return rco;
  }

  /**
   * Creates a new cluster from the request: the cluster name is required and
   * the cluster id must be null; the stack version is required and must be a
   * known stack; an optional repository version, when supplied, must already
   * exist for that stack.
   */
  @Override
  public void createCluster(ClusterRequest request)
      throws AmbariException {
    if (request.getClusterName() == null
        || request.getClusterName().isEmpty()
        || request.getClusterId() != null) {
      throw new IllegalArgumentException("Cluster name should be provided" +
          " and clusterId should be null");
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("Received a createCluster request"
          + ", clusterName=" + request.getClusterName()
          + ", request=" + request);
    }

    if (request.getStackVersion() == null
        || request.getStackVersion().isEmpty()) {
      throw new IllegalArgumentException("Stack information should be"
          + " provided when creating a cluster");
    }

    StackId stackId = new StackId(request.getStackVersion());
    StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
        stackId.getStackVersion());
    if (stackInfo == null) {
      throw new StackAccessException("stackName=" + stackId.getStackName() +
          ", stackVersion=" + stackId.getStackVersion());
    }

    RepositoryVersionEntity versionEntity = null;

    if (null != request.getRepositoryVersion()) {
      versionEntity = repositoryVersionDAO.findByStackAndVersion(stackId,
          request.getRepositoryVersion());

      if (null == versionEntity) {
        throw new AmbariException(String.format("Tried to create a cluster on version %s, but that version doesn't exist",
request.getRepositoryVersion())); } } // FIXME add support for desired configs at cluster level boolean foundInvalidHosts = false; StringBuilder invalidHostsStr = new StringBuilder(); if (request.getHostNames() != null) { for (String hostname : request.getHostNames()) { try { clusters.getHost(hostname); } catch (HostNotFoundException e) { if (foundInvalidHosts) { invalidHostsStr.append(","); } foundInvalidHosts = true; invalidHostsStr.append(hostname); } } } if (foundInvalidHosts) { throw new HostNotFoundException(invalidHostsStr.toString()); } clusters.addCluster(request.getClusterName(), stackId, request.getSecurityType()); Cluster c = clusters.getCluster(request.getClusterName()); if (request.getHostNames() != null) { clusters.mapHostsToCluster(request.getHostNames(), request.getClusterName()); } // Create cluster widgets and layouts initializeWidgetsAndLayouts(c, null); if (null != versionEntity) { ClusterVersionDAO clusterVersionDAO = injector.getInstance(ClusterVersionDAO.class); ClusterVersionEntity clusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(request.getClusterName(), stackId, request.getRepositoryVersion()); if (null == clusterVersion) { c.createClusterVersion(stackId, versionEntity.getVersion(), getAuthName(), RepositoryVersionState.INIT); } } } @Override public synchronized void createHostComponents(Set<ServiceComponentHostRequest> requests) throws AmbariException, AuthorizationException { if (requests.isEmpty()) { LOG.warn("Received an empty requests set"); return; } // do all validation checks Map<String, Map<String, Map<String, Set<String>>>> hostComponentNames = new HashMap<String, Map<String, Map<String, Set<String>>>>(); Set<String> duplicates = new HashSet<String>(); for (ServiceComponentHostRequest request : requests) { validateServiceComponentHostRequest(request); Cluster cluster; try { cluster = clusters.getCluster(request.getClusterName()); } catch (ClusterNotFoundException e) { throw new ParentObjectNotFoundException( 
"Attempted to add a host_component to a cluster which doesn't exist: ", e); } if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.SERVICE_ADD_DELETE_SERVICES,RoleAuthorization.HOST_ADD_DELETE_COMPONENTS))) { throw new AuthorizationException("The authenticated user is not authorized to install service components on to hosts"); } if (StringUtils.isEmpty(request.getServiceName())) { request.setServiceName(findServiceName(cluster, request.getComponentName())); } if (LOG.isDebugEnabled()) { LOG.debug("Received a createHostComponent request" + ", clusterName=" + request.getClusterName() + ", serviceName=" + request.getServiceName() + ", componentName=" + request.getComponentName() + ", hostname=" + request.getHostname() + ", request=" + request); } if (!hostComponentNames.containsKey(request.getClusterName())) { hostComponentNames.put(request.getClusterName(), new HashMap<String, Map<String,Set<String>>>()); } if (!hostComponentNames.get(request.getClusterName()) .containsKey(request.getServiceName())) { hostComponentNames.get(request.getClusterName()).put( request.getServiceName(), new HashMap<String, Set<String>>()); } if (!hostComponentNames.get(request.getClusterName()) .get(request.getServiceName()) .containsKey(request.getComponentName())) { hostComponentNames.get(request.getClusterName()) .get(request.getServiceName()).put(request.getComponentName(), new HashSet<String>()); } if (hostComponentNames.get(request.getClusterName()) .get(request.getServiceName()) .get(request.getComponentName()) .contains(request.getHostname())) { duplicates.add("[clusterName=" + request.getClusterName() + ", hostName=" + request.getHostname() + ", componentName=" +request.getComponentName() +']'); continue; } hostComponentNames.get(request.getClusterName()) .get(request.getServiceName()).get(request.getComponentName()) .add(request.getHostname()); if (request.getDesiredState() != null && !request.getDesiredState().isEmpty()) 
{ State state = State.valueOf(request.getDesiredState()); if (!state.isValidDesiredState() || state != State.INIT) { throw new IllegalArgumentException("Invalid desired state" + " only INIT state allowed during creation" + ", providedDesiredState=" + request.getDesiredState()); } } Service s; try { s = cluster.getService(request.getServiceName()); } catch (ServiceNotFoundException e) { throw new IllegalArgumentException( "The service[" + request.getServiceName() + "] associated with the component[" + request.getComponentName() + "] doesn't exist for the cluster[" + request.getClusterName() + "]"); } ServiceComponent sc = s.getServiceComponent( request.getComponentName()); setRestartRequiredServices(s, request.getComponentName()); Host host; try { host = clusters.getHost(request.getHostname()); } catch (HostNotFoundException e) { throw new ParentObjectNotFoundException( "Attempted to add a host_component to a host that doesn't exist: ", e); } Set<Cluster> mappedClusters = clusters.getClustersForHost(request.getHostname()); boolean validCluster = false; if (LOG.isDebugEnabled()) { LOG.debug("Looking to match host to cluster" + ", hostnameViaReg=" + host.getHostName() + ", hostname=" + request.getHostname() + ", clusterName=" + request.getClusterName() + ", hostClusterMapCount=" + mappedClusters.size()); } for (Cluster mappedCluster : mappedClusters) { if (LOG.isDebugEnabled()) { LOG.debug("Host belongs to cluster" + ", hostname=" + request.getHostname() + ", clusterName=" + mappedCluster.getClusterName()); } if (mappedCluster.getClusterName().equals( request.getClusterName())) { validCluster = true; break; } } if (!validCluster) { throw new ParentObjectNotFoundException("Attempted to add a host_component to a host that doesn't exist: " + "clusterName=" + request.getClusterName() + ", hostName=" + request.getHostname()); } try { ServiceComponentHost sch = sc.getServiceComponentHost( request.getHostname()); if (sch != null) { duplicates.add("[clusterName=" + 
request.getClusterName() + ", hostName=" + request.getHostname() + ", componentName=" +request.getComponentName() +']'); } } catch (AmbariException e) { // Expected } } // ensure only a single cluster update if (hostComponentNames.size() != 1) { throw new IllegalArgumentException("Invalid arguments - updates allowed" + " on only one cluster at a time"); } if (!duplicates.isEmpty()) { StringBuilder names = new StringBuilder(); boolean first = true; for (String hName : duplicates) { if (!first) { names.append(","); } first = false; names.append(hName); } String msg; if (duplicates.size() == 1) { msg = "Attempted to create a host_component which already exists: "; } else { msg = "Attempted to create host_component's which already exist: "; } throw new DuplicateResourceException(msg + names.toString()); } // set restartRequired flag for monitoring services setMonitoringServicesRestartRequired(requests); // now doing actual work persistServiceComponentHosts(requests); } void persistServiceComponentHosts(Set<ServiceComponentHostRequest> requests) throws AmbariException { Multimap<Cluster, ServiceComponentHost> schMap = ArrayListMultimap.create(); for (ServiceComponentHostRequest request : requests) { Cluster cluster = clusters.getCluster(request.getClusterName()); Service s = cluster.getService(request.getServiceName()); ServiceComponent sc = s.getServiceComponent( request.getComponentName()); ServiceComponentHost sch = serviceComponentHostFactory.createNew(sc, request.getHostname()); if (request.getDesiredState() != null && !request.getDesiredState().isEmpty()) { State state = State.valueOf(request.getDesiredState()); sch.setDesiredState(state); } sch.setDesiredStackVersion(sc.getDesiredStackVersion()); schMap.put(cluster, sch); } for (Cluster cluster : schMap.keySet()) { cluster.addServiceComponentHosts(schMap.get(cluster)); } } private void setMonitoringServicesRestartRequired( Set<ServiceComponentHostRequest> requests) throws AmbariException { for 
(ServiceComponentHostRequest request : requests) {
      Cluster cluster = clusters.getCluster(request.getClusterName());
      StackId stackId = cluster.getCurrentStackVersion();
      // Stack-defined monitoring services: for each one present in the
      // cluster, flag hosts as needing a restart so monitoring picks up the
      // newly added host components.
      Collection<String> monitoringServices = ambariMetaInfo.getMonitoringServiceNames(
        stackId.getStackName(), stackId.getStackVersion());

      for (String serviceName : monitoringServices) {
        if (cluster.getServices().containsKey(serviceName)) {
          Service service = cluster.getService(serviceName);

          for (ServiceComponent sc : service.getServiceComponents().values()) {
            if (sc.isMasterComponent()) {
              // Master components: every host running them must restart.
              for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
                sch.setRestartRequired(true);
              }

              continue;
            }

            // Non-master components: only the host named in the request,
            // and only if this component actually runs there.
            String hostname = request.getHostname();
            if (sc.getServiceComponentHosts().containsKey(hostname)) {
              ServiceComponentHost sch = sc.getServiceComponentHost(hostname);
              sch.setRestartRequired(true);
            }
          }
        }
      }
    }
  }

  /**
   * Marks every host component of the given service as requiring a restart,
   * but only when the stack lists the service among its "restart required"
   * services. Skipped entirely when the changed component is a client
   * component.
   */
  private void setRestartRequiredServices(
          Service service, String componentName) throws AmbariException {

    Cluster cluster = service.getCluster();
    StackId stackId = cluster.getCurrentStackVersion();
    if (service.getServiceComponent(componentName).isClientComponent()) {
      return;
    }

    Set<String> needRestartServices = ambariMetaInfo.getRestartRequiredServicesNames(
      stackId.getStackName(), stackId.getStackVersion());

    if(needRestartServices.contains(service.getName())) {
      Map<String, ServiceComponent> m = service.getServiceComponents();
      for (Entry<String, ServiceComponent> entry : m.entrySet()) {
        ServiceComponent serviceComponent = entry.getValue();
        Map<String, ServiceComponentHost> schMap = serviceComponent.getServiceComponentHosts();

          for (Entry<String, ServiceComponentHost> sch : schMap.entrySet()) {
            ServiceComponentHost serviceComponentHost = sch.getValue();
            serviceComponentHost.setRestartRequired(true);
          }
      }
    }
  }

  /**
   * Called when a host's rack assignment changes: flags host components of
   * rack-sensitive services (per the current stack) as requiring restart.
   */
  @Override
  public void registerRackChange(String clusterName) throws AmbariException {
    Cluster cluster = clusters.getCluster(clusterName);
    StackId stackId = cluster.getCurrentStackVersion();

    Set<String>
rackSensitiveServices = ambariMetaInfo.getRackSensitiveServicesNames(stackId.getStackName(), stackId.getStackVersion()); Map<String, Service> services = cluster.getServices(); for (Service service : services.values()) { if(rackSensitiveServices.contains(service.getName())) { Map<String, ServiceComponent> serviceComponents = service.getServiceComponents(); for (ServiceComponent serviceComponent : serviceComponents.values()) { Map<String, ServiceComponentHost> schMap = serviceComponent.getServiceComponentHosts(); for (Entry<String, ServiceComponentHost> sch : schMap.entrySet()) { ServiceComponentHost serviceComponentHost = sch.getValue(); serviceComponentHost.setRestartRequired(true); } } } } } @Override public synchronized ConfigurationResponse createConfiguration( ConfigurationRequest request) throws AmbariException, AuthorizationException { if (null == request.getClusterName() || request.getClusterName().isEmpty() || null == request.getType() || request.getType().isEmpty() || null == request.getProperties()) { throw new IllegalArgumentException("Invalid Arguments," + " clustername, config type and configs should not" + " be null or empty"); } Cluster cluster = clusters.getCluster(request.getClusterName()); String configType = request.getType(); // If the config type is for a service, then allow a user with SERVICE_MODIFY_CONFIGS to // update, else ensure the user has CLUSTER_MODIFY_CONFIGS String service = null; try { service = cluster.getServiceForConfigTypes(Collections.singleton(configType)); } catch (IllegalArgumentException e) { // Ignore this since we may have hit a config type that spans multiple services. This may // happen in unit test cases but should not happen with later versions of stacks. 
} if(StringUtils.isEmpty(service)) { if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.CLUSTER_MODIFY_CONFIGS))) { throw new AuthorizationException("The authenticated user does not have authorization " + "to create cluster configurations"); } } else { if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.SERVICE_MODIFY_CONFIGS))) { throw new AuthorizationException("The authenticated user does not have authorization " + "to create service configurations"); } } Map<String, String> requestProperties = request.getProperties(); Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes = cluster.getConfigPropertiesTypes(request.getType()); if(propertiesTypes.containsKey(PropertyType.PASSWORD)) { for(String passwordProperty : propertiesTypes.get(PropertyType.PASSWORD)) { if(requestProperties.containsKey(passwordProperty)) { String passwordPropertyValue = requestProperties.get(passwordProperty); if (!SecretReference.isSecret(passwordPropertyValue)) { continue; } SecretReference ref = new SecretReference(passwordPropertyValue, cluster); String refValue = ref.getValue(); requestProperties.put(passwordProperty, refValue); } } } Map<String, Config> configs = cluster.getConfigsByType( request.getType()); if (null == configs) { configs = new HashMap<String, Config>(); } // Configuration attributes are optional. 
If not present, use empty map Map<String, Map<String, String>> propertiesAttributes = request.getPropertiesAttributes(); if (null == propertiesAttributes) { propertiesAttributes = new HashMap<String, Map<String,String>>(); } if (configs.containsKey(request.getVersionTag())) { throw new AmbariException(MessageFormat.format("Configuration with tag ''{0}'' exists for ''{1}''", request.getVersionTag(), request.getType())); } handleGlobalsBackwardsCompability(request, propertiesAttributes); Config config = createConfig(cluster, request.getType(), requestProperties, request.getVersionTag(), propertiesAttributes); return new ConfigurationResponse(cluster.getClusterName(), config); } private void handleGlobalsBackwardsCompability(ConfigurationRequest request, Map<String, Map<String, String>> propertiesAttributes) throws AmbariException { Cluster cluster = clusters.getCluster(request.getClusterName()); if(request.getType().equals(Configuration.GLOBAL_CONFIG_TAG)) { Map<String, Map<String, String>> configTypes = new HashMap<String, Map<String, String>>(); configTypes.put(Configuration.GLOBAL_CONFIG_TAG, request.getProperties()); configHelper.moveDeprecatedGlobals(cluster.getCurrentStackVersion(), configTypes, cluster.getClusterName()); for(Map.Entry<String, Map<String, String>> configType : configTypes.entrySet()) { String configTypeName = configType.getKey(); Map<String, String> properties = configType.getValue(); if(configTypeName.equals(Configuration.GLOBAL_CONFIG_TAG)) { continue; } String tag; if(cluster.getConfigsByType(configTypeName) == null) { tag = "version1"; } else { tag = "version" + System.currentTimeMillis(); } Config config = createConfig(cluster, configTypeName, properties, tag, propertiesAttributes); if (config != null) { String authName = getAuthName(); if (cluster.addDesiredConfig(authName, Collections.singleton(config)) != null) { LOG.info("cluster '" + cluster.getClusterName() + "' " + "changed by: '" + authName + "'; " + "type='" + config.getType() + 
"' " + "tag='" + config.getTag());
          }
        }
      }
    }
  }

  /**
   * Creates a new Config of the given type, persists it, and registers it
   * with the cluster. The version tag is applied only when non-empty;
   * otherwise the factory-assigned tag is kept.
   */
  @Override
  public Config createConfig(Cluster cluster, String type, Map<String, String> properties,
                             String versionTag, Map<String, Map<String, String>> propertiesAttributes) {

    Config config = configFactory.createNew(cluster, type,
      properties, propertiesAttributes);

    if (!StringUtils.isEmpty(versionTag)) {
      config.setTag(versionTag);
    }

    config.persist();

    cluster.addConfig(config);
    return config;
  }

  /**
   * Creates local users. Username and password are both required for every
   * request; the final {@code false} argument presumably marks the account
   * as non-LDAP — TODO confirm against Users.createUser.
   */
  @Override
  public void createUsers(Set<UserRequest> requests) throws AmbariException {
    for (UserRequest request : requests) {
      if (null == request.getUsername() || request.getUsername().isEmpty() ||
          null == request.getPassword() || request.getPassword().isEmpty()) {
        throw new AmbariException("Username and password must be supplied.");
      }

      users.createUser(request.getUsername(), request.getPassword(), request.isActive(), request.isAdmin(), false);
    }
  }

  /**
   * Creates groups; a blank name or an already-existing group is an error.
   */
  @Override
  public void createGroups(Set<GroupRequest> requests) throws AmbariException {
    for (GroupRequest request : requests) {
      if (StringUtils.isBlank(request.getGroupName())) {
        throw new AmbariException("Group name must be supplied.");
      }
      final Group group = users.getGroup(request.getGroupName());
      if (group != null) {
        throw new AmbariException("Group already exists.");
      }
      users.createGroup(request.getGroupName());
    }
  }

  /**
   * Adds users to groups; both group name and user name are required per
   * request.
   */
  @Override
  public void createMembers(Set<MemberRequest> requests) throws AmbariException {
    for (MemberRequest request : requests) {
      if (StringUtils.isBlank(request.getGroupName()) || StringUtils.isBlank(request.getUserName())) {
        throw new AmbariException("Both group name and user name must be supplied.");
      }
      users.addMemberToGroup(request.getGroupName(), request.getUserName());
    }
  }

  /**
   * Resolves group-membership requests into (group, user) responses.
   */
  @Override
  public Set<MemberResponse> getMembers(Set<MemberRequest> requests)
      throws AmbariException {
    final Set<MemberResponse> responses = new HashSet<MemberResponse>();
    for (MemberRequest request: requests) {
      LOG.debug("Received a getMembers request, " + request.toString());
      final Group group =
users.getGroup(request.getGroupName());
      if (null == group) {
        if (requests.size() == 1) {
          // only throw exception if there is a single request
          // if there are multiple requests, this indicates an OR predicate
          throw new ObjectNotFoundException("Cannot find group '"
              + request.getGroupName() + "'");
        }
      } else {
        for (User user: users.getGroupMembers(group.getGroupName())) {
          final MemberResponse response = new MemberResponse(group.getGroupName(), user.getUserName());
          responses.add(response);
        }
      }
    }
    return responses;
  }

  /**
   * Reconciles a single group's membership with the requested set: users in
   * the group but not in the requests are removed, users requested but not
   * yet members are added. All requests must target the same group.
   * The unchecked casts come from the raw return type of
   * CollectionUtils.subtract.
   */
  @Override
  @SuppressWarnings("unchecked")
  public synchronized void updateMembers(Set<MemberRequest> requests) throws AmbariException {
    // validate
    String groupName = null;
    for (MemberRequest request: requests) {
      if (groupName != null
          && !request.getGroupName().equals(groupName)) {
        throw new AmbariException("Can't manage members of different groups in one request");
      }
      groupName = request.getGroupName();
    }
    final List<String> requiredMembers = new ArrayList<String>();
    for (MemberRequest request: requests) {
      if (request.getUserName() != null) {
        requiredMembers.add(request.getUserName());
      }
    }
    final List<String> currentMembers = users.getAllMembers(groupName);
    for (String user: (Collection<String>) CollectionUtils.subtract(currentMembers, requiredMembers)) {
      users.removeMemberFromGroup(groupName, user);
    }
    for (String user: (Collection<String>) CollectionUtils.subtract(requiredMembers, currentMembers)) {
      users.addMemberToGroup(groupName, user);
    }
  }

  /**
   * Creates a new Stage for the given request, tolerating a null cluster
   * (cluster name null / id -1 in that case).
   *
   * NOTE(review): File.pathSeparator is ':' on Unix (';' on Windows), so the
   * log dir becomes e.g. "/tmp/ambari:42". File.separator looks like the
   * intended constant — verify before changing, as existing tooling may
   * depend on the current layout.
   */
  private Stage createNewStage(long id, Cluster cluster, long requestId,
                               String requestContext, String clusterHostInfo,
                               String commandParamsStage, String hostParamsStage) {
    String logDir = BASE_LOG_DIR + File.pathSeparator + requestId;
    Stage stage =
        stageFactory.createNew(requestId, logDir,
          null == cluster ? null : cluster.getClusterName(),
          null == cluster ?
-1L : cluster.getClusterId(), requestContext, clusterHostInfo, commandParamsStage, hostParamsStage); stage.setStageId(id); return stage; } private Set<ClusterResponse> getClusters(ClusterRequest request) throws AmbariException, AuthorizationException { Set<ClusterResponse> response = new HashSet<ClusterResponse>(); if (LOG.isDebugEnabled()) { LOG.debug("Received a getClusters request" + ", clusterName=" + request.getClusterName() + ", clusterId=" + request.getClusterId() + ", stackInfo=" + request.getStackVersion()); } Cluster singleCluster = null; try { if (request.getClusterName() != null) { singleCluster = clusters.getCluster(request.getClusterName()); } else if (request.getClusterId() != null) { singleCluster = clusters.getClusterById(request.getClusterId()); } } catch(ClusterNotFoundException e) { // the user shouldn't know the difference between a cluster that does not exist or one that // he doesn't have access to. if (AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null, RoleAuthorization.AMBARI_ADD_DELETE_CLUSTERS)) { throw e; } else { throw new AuthorizationException(); } } if (singleCluster != null) { ClusterResponse cr = singleCluster.convertToResponse(); cr.setDesiredConfigs(singleCluster.getDesiredConfigs()); cr.setDesiredServiceConfigVersions(singleCluster.getActiveServiceConfigVersions()); cr.setCredentialStoreServiceProperties(getCredentialStoreServiceProperties()); // If the user is authorized to view information about this cluster, add it to the response // TODO: Uncomment this when the UI doesn't require view access for View-only users. // if (AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cr.getResourceId(), // RoleAuthorization.AUTHORIZATIONS_VIEW_CLUSTER)) { response.add(cr); // } // else { // // the user shouldn't know the difference between a cluster that does not exist or one that // // he doesn't have access to. 
// throw new AuthorizationException(); // } return response; } Map<String, Cluster> allClusters = clusters.getClusters(); for (Cluster c : allClusters.values()) { if (request.getStackVersion() != null) { if (!request.getStackVersion().equals( c.getDesiredStackVersion().getStackId())) { // skip non matching stack versions continue; } } // TODO: Uncomment this when the UI doesn't require view access for View-only users. // If the user is authorized to view information about this cluster, add it to the response // if (AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, c.getResourceId(), // RoleAuthorization.AUTHORIZATIONS_VIEW_CLUSTER)) { ClusterResponse cr = c.convertToResponse(); cr.setDesiredConfigs(c.getDesiredConfigs()); cr.setDesiredServiceConfigVersions(c.getActiveServiceConfigVersions()); cr.setCredentialStoreServiceProperties(getCredentialStoreServiceProperties()); response.add(cr); // } } StringBuilder builder = new StringBuilder(); if (LOG.isDebugEnabled()) { clusters.debugDump(builder); LOG.debug("Cluster State for cluster " + builder.toString()); } return response; } private Set<ServiceComponentHostResponse> getHostComponents( ServiceComponentHostRequest request) throws AmbariException { LOG.debug("Processing request {}", request); if (request.getClusterName() == null || request.getClusterName().isEmpty()) { IllegalArgumentException e = new IllegalArgumentException("Invalid arguments, cluster name should not be null"); LOG.debug("Cluster not specified in request", e); throw e; } final Cluster cluster; try { cluster = clusters.getCluster(request.getClusterName()); } catch (ClusterNotFoundException e) { LOG.error("Cluster not found ", e); throw new ParentObjectNotFoundException("Parent Cluster resource doesn't exist", e); } if (request.getHostname() != null) { try { if (!clusters.getClustersForHost(request.getHostname()).contains(cluster)) { // case where host exists but not associated with given cluster LOG.error("Host doesn't belong to cluster - " + 
request.getHostname());
          throw new ParentObjectNotFoundException("Parent Host resource doesn't exist",
              new HostNotFoundException(request.getClusterName(), request.getHostname()));
        }
      } catch (HostNotFoundException e) {
        LOG.error("Host not found", e);
        // creating new HostNotFoundException to add cluster name
        throw new ParentObjectNotFoundException("Parent Host resource doesn't exist",
            new HostNotFoundException(request.getClusterName(), request.getHostname()));
      }
    }

    if (request.getComponentName() != null) {
      if (request.getServiceName() == null
          || request.getServiceName().isEmpty()) {
        // derive the owning service from the stack definition when not supplied
        StackId stackId = cluster.getDesiredStackVersion();
        String serviceName = ambariMetaInfo.getComponentToService(stackId.getStackName(),
            stackId.getStackVersion(), request.getComponentName());
        if (LOG.isDebugEnabled()) {
          LOG.debug("Looking up service name for component"
              + ", componentName=" + request.getComponentName()
              + ", serviceName=" + serviceName
              + ", stackInfo=" + stackId.getStackId());
        }
        if (serviceName == null
            || serviceName.isEmpty()) {
          LOG.error("Unable to find service for component {}", request.getComponentName());
          throw new ServiceComponentHostNotFoundException(
              cluster.getClusterName(), null, request.getComponentName(), request.getHostname());
        }
        request.setServiceName(serviceName);
      }
    }

    // scope the search to a single service when one is named, otherwise all
    Set<Service> services = new HashSet<Service>();
    if (request.getServiceName() != null && !request.getServiceName().isEmpty()) {
      services.add(cluster.getService(request.getServiceName()));
    } else {
      services.addAll(cluster.getServices().values());
    }

    Set<ServiceComponentHostResponse> response =
        new HashSet<ServiceComponentHostResponse>();

    // optional filters parsed from the request
    boolean checkDesiredState = false;
    State desiredStateToCheck = null;
    boolean checkState = false;
    State stateToCheck = null;
    boolean filterBasedConfigStaleness = false;
    boolean staleConfig = true;
    if (request.getStaleConfig() != null) {
      filterBasedConfigStaleness = true;
      staleConfig = "true".equals(request.getStaleConfig().toLowerCase());
    }
    if (request.getDesiredState()
!= null
        && !request.getDesiredState().isEmpty()) {
      desiredStateToCheck = State.valueOf(request.getDesiredState());
      if (!desiredStateToCheck.isValidDesiredState()) {
        throw new IllegalArgumentException("Invalid arguments, invalid desired"
            + " state, desiredState=" + desiredStateToCheck);
      }
      checkDesiredState = true;
    }

    if (!StringUtils.isEmpty(request.getState())) {
      stateToCheck = State.valueOf(request.getState());
      // maybe check should be more wider
      if (stateToCheck == null) {
        throw new IllegalArgumentException("Invalid arguments, invalid state, State=" + request.getState());
      }
      checkState = true;
    }

    Map<String, Host> hosts = clusters.getHostsForCluster(cluster.getClusterName());

    for (Service s : services) {
      // filter on component name if provided
      Set<ServiceComponent> components = new HashSet<ServiceComponent>();
      if (request.getComponentName() != null) {
        components.add(s.getServiceComponent(request.getComponentName()));
      } else {
        components.addAll(s.getServiceComponents().values());
      }
      for (ServiceComponent sc : components) {
        if (request.getComponentName() != null) {
          if (!sc.getName().equals(request.getComponentName())) {
            continue;
          }
        }
        // filter on hostname if provided
        // filter on desired state if provided
        Map<String, ServiceComponentHost> serviceComponentHostMap =
            sc.getServiceComponentHosts();

        if (request.getHostname() != null) {
          try {
            if (serviceComponentHostMap == null
                || !serviceComponentHostMap.containsKey(request.getHostname())) {
              throw new ServiceComponentHostNotFoundException(cluster.getClusterName(),
                  s.getName(), sc.getName(), request.getHostname());
            }
            ServiceComponentHost sch = serviceComponentHostMap.get(request.getHostname());
            if (null == sch) {
              // It's possible that the host was deleted during the time that the request was generated.
continue;
            }

            if (checkDesiredState && (desiredStateToCheck != sch.getDesiredState())) {
              continue;
            }

            if (checkState && stateToCheck != sch.getState()) {
              continue;
            }

            if (request.getAdminState() != null) {
              String stringToMatch =
                  sch.getComponentAdminState() == null ? "" : sch.getComponentAdminState().name();
              if (!request.getAdminState().equals(stringToMatch)) {
                continue;
              }
            }

            ServiceComponentHostResponse r = sch.convertToResponse();
            if (null == r || (filterBasedConfigStaleness && r.isStaleConfig() != staleConfig)) {
              continue;
            }

            Host host = hosts.get(sch.getHostName());
            if (host == null) {
              throw new HostNotFoundException(cluster.getClusterName(), sch.getHostName());
            }

            r.setMaintenanceState(maintenanceStateHelper.getEffectiveState(sch, host).name());
            response.add(r);
          } catch (ServiceComponentHostNotFoundException e) {
            if (request.getServiceName() == null || request.getComponentName() == null) {
              // Ignore the exception if either the service name or component name are not specified.
              // This is an artifact of how we get host_components and can happen in the case where
              // we get all host_components for a host, for example.
              LOG.debug("Ignoring not specified host_component ", e);
            } else {
              // Otherwise rethrow the exception and let the caller decide if it's an error condition.
              // Logging the exception as debug since this does not necessarily indicate an error
              // condition.
              LOG.debug("ServiceComponentHost not found ", e);
              throw new ServiceComponentHostNotFoundException(cluster.getClusterName(),
                  request.getServiceName(), request.getComponentName(), request.getHostname());
            }
          }
        } else {
          // no hostname filter: walk every host component of this component
          for (ServiceComponentHost sch : serviceComponentHostMap.values()) {
            if (null == sch) {
              // It's possible that the host was deleted during the time that the request was generated.
continue;
            }

            if (checkDesiredState && (desiredStateToCheck != sch.getDesiredState())) {
              continue;
            }

            if (checkState && stateToCheck != sch.getState()) {
              continue;
            }

            if (request.getAdminState() != null) {
              String stringToMatch =
                  sch.getComponentAdminState() == null ? "" : sch.getComponentAdminState().name();
              if (!request.getAdminState().equals(stringToMatch)) {
                continue;
              }
            }

            ServiceComponentHostResponse r = sch.convertToResponse();
            if (null == r || (filterBasedConfigStaleness && r.isStaleConfig() != staleConfig)) {
              continue;
            }

            Host host = hosts.get(sch.getHostName());
            if (host == null) {
              throw new HostNotFoundException(cluster.getClusterName(), sch.getHostName());
            }

            r.setMaintenanceState(maintenanceStateHelper.getEffectiveState(sch, host).name());
            response.add(r);
          }
        }
      }
    }
    return response;
  }

  /**
   * Computes the effective maintenance state of a host component by
   * delegating to the maintenance state helper.
   */
  @Override
  public MaintenanceState getEffectiveMaintenanceState(ServiceComponentHost sch)
      throws AmbariException {
    return maintenanceStateHelper.getEffectiveState(sch);
  }

  /**
   * Gets configurations for a cluster: a single fully-populated config when
   * both type and version tag are given, otherwise per-type or all-config
   * summaries (properties included only when explicitly requested).
   */
  private Set<ConfigurationResponse> getConfigurations(
      ConfigurationRequest request) throws AmbariException {
    if (request.getClusterName() == null) {
      throw new IllegalArgumentException("Invalid arguments, cluster name"
          + " should not be null");
    }

    Cluster cluster = clusters.getCluster(request.getClusterName());

    Set<ConfigurationResponse> responses = new HashSet<ConfigurationResponse>();

    // !!!
// if only one, then we need full properties
    if (null != request.getType()
        && null != request.getVersionTag()) {
      Config config = cluster.getConfig(request.getType(),
          request.getVersionTag());
      if (null != config) {
        ConfigurationResponse response = new ConfigurationResponse(
            cluster.getClusterName(), config);
        responses.add(response);
      }
    } else {
      boolean includeProps = request.includeProperties();
      if (null != request.getType()) {
        // all tags of a single config type
        Map<String, Config> configs = cluster.getConfigsByType(
            request.getType());

        if (null != configs) {
          for (Entry<String, Config> entry : configs.entrySet()) {
            Config config = entry.getValue();
            ConfigurationResponse response = new ConfigurationResponse(
                cluster.getClusterName(), config.getStackId(), request.getType(),
                config.getTag(), entry.getValue().getVersion(),
                includeProps ? config.getProperties() : new HashMap<String, String>(),
                includeProps ? config.getPropertiesAttributes() : new HashMap<String, Map<String,String>>(),
                config.getPropertiesTypes());
            responses.add(response);
          }
        }
      } else {
        // !!! all configuration
        Collection<Config> all = cluster.getAllConfigs();

        for (Config config : all) {
          ConfigurationResponse response = new ConfigurationResponse(
              cluster.getClusterName(), config.getStackId(), config.getType(),
              config.getTag(), config.getVersion(),
              includeProps ? config.getProperties() : new HashMap<String, String>(),
              includeProps ? config.getPropertiesAttributes() : new HashMap<String, Map<String,String>>(),
              config.getPropertiesTypes());
          responses.add(response);
        }
      }
    }
    return responses;
  }

  /**
   * Applies each cluster update request in turn. Any KDC administrator
   * credentials found in the session attributes are stripped out and stored
   * in the credential store before the update is applied.
   */
  @Override
  public synchronized RequestStatusResponse updateClusters(Set<ClusterRequest> requests,
                                                           Map<String, String> requestProperties)
      throws AmbariException, AuthorizationException {

    RequestStatusResponse response = null;

    // We have to allow for multiple requests to account for multiple
    // configuration updates (create multiple configuration resources)...
for (ClusterRequest request : requests) {
      // TODO : Is there ever a real world case where we could have multiple non-null responses?

      // ***************************************************
      // set any session attributes for this cluster request
      Cluster cluster;
      if (request.getClusterId() == null) {
        cluster = clusters.getCluster(request.getClusterName());
      } else {
        cluster = clusters.getClusterById(request.getClusterId());
      }

      if (cluster == null) {
        throw new AmbariException("The cluster may not be null");
      }

      Map<String, Object> sessionAttributes = request.getSessionAttributes();

      // TODO: Once the UI uses the Credential Resource API, remove this block to _clean_ the
      // TODO: session attributes and store any KDC administrator credentials in the secure
      // TODO: credential provider facility.
      // For now, to keep things backwards compatible, get and remove the KDC administrator credentials
      // from the session attributes and store them in the CredentialsProvider. The KDC administrator
      // credentials are prefixed with kdc_admin/. The following attributes are expected, if setting
      // the KDC administrator credentials:
      //   kerberos_admin/principal
      //   kerberos_admin/password
      if((sessionAttributes != null) && !sessionAttributes.isEmpty()) {
        Map<String, Object> cleanedSessionAttributes = new HashMap<String, Object>();
        String principal = null;
        char[] password = null;

        // partition attributes: KDC credentials are captured, everything else kept
        for(Map.Entry<String,Object> entry: sessionAttributes.entrySet()) {
          String name = entry.getKey();
          Object value = entry.getValue();

          if ("kerberos_admin/principal".equals(name)) {
            if(value instanceof String) {
              principal = (String)value;
            }
          } else if ("kerberos_admin/password".equals(name)) {
            if(value instanceof String) {
              password = ((String) value).toCharArray();
            }
          } else {
            cleanedSessionAttributes.put(name, value);
          }
        }

        if(principal != null) {
          // The KDC admin principal exists...
// set the credentials in the credentials store
          credentialStoreService.setCredential(cluster.getClusterName(),
              KerberosHelper.KDC_ADMINISTRATOR_CREDENTIAL_ALIAS,
              new PrincipalKeyCredential(principal, password), CredentialStoreType.TEMPORARY);
        }

        sessionAttributes = cleanedSessionAttributes;
      }
      // TODO: END

      cluster.addSessionAttributes(sessionAttributes);
      //
      // ***************************************************

      response = updateCluster(request, requestProperties);
    }
    return response;
  }

  /**
   * Applies a single cluster update request: rename, desired configurations,
   * provisioning state, stack version, host mapping, service config version
   * rollback and security type change — enforcing the matching authorization
   * for each kind of change.
   */
  private synchronized RequestStatusResponse updateCluster(ClusterRequest request,
                                                           Map<String, String> requestProperties)
      throws AmbariException, AuthorizationException {

    RequestStageContainer requestStageContainer = null;

    if (request.getClusterId() == null
        && (request.getClusterName() == null
        || request.getClusterName().isEmpty())) {
      throw new IllegalArgumentException("Invalid arguments, cluster id or cluster name should not be null");
    }

    LOG.info("Received a updateCluster request"
        + ", clusterId=" + request.getClusterId()
        + ", clusterName=" + request.getClusterName()
        + ", securityType=" + request.getSecurityType()
        + ", request=" + request);

    final Cluster cluster;
    if (request.getClusterId() == null) {
      cluster = clusters.getCluster(request.getClusterName());
    } else {
      cluster = clusters.getClusterById(request.getClusterId());
    }

    // Ensure the user has access to update this cluster
    AuthorizationHelper.verifyAuthorization(ResourceType.CLUSTER, cluster.getResourceId(), RoleAuthorization.AUTHORIZATIONS_UPDATE_CLUSTER);

    //save data to return configurations created
    List<ConfigurationResponse> configurationResponses =
        new LinkedList<ConfigurationResponse>();
    ServiceConfigVersionResponse serviceConfigVersionResponse = null;

    // desired-config updates and service-config rollbacks are mutually exclusive
    if (request.getDesiredConfig() != null && request.getServiceConfigVersionRequest() != null) {
      String msg = "Unable to set desired configs and rollback at same time, request = " + request.toString();
      LOG.error(msg);
      throw new IllegalArgumentException(msg);
    }

    // set the new name of
// the cluster if change is requested
    if (!cluster.getClusterName().equals(request.getClusterName())) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Received cluster name change request from "
            + cluster.getClusterName() + " to " + request.getClusterName());
      }

      if(!AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null, EnumSet.of(RoleAuthorization.AMBARI_RENAME_CLUSTER))) {
        throw new AuthorizationException("The authenticated user does not have authorization to rename the cluster");
      }

      cluster.setClusterName(request.getClusterName());
    }

    //check if desired configs are available in request and they were changed
    boolean isConfigurationCreationNeeded = false;
    if (request.getDesiredConfig() != null) {
      for (ConfigurationRequest desiredConfig : request.getDesiredConfig()) {
        Map<String, String> requestConfigProperties = desiredConfig.getProperties();

        // processing password properties
        if(requestConfigProperties != null && !requestConfigProperties.isEmpty()) {
          Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes = cluster.getConfigPropertiesTypes(
              desiredConfig.getType()
          );
          for (Entry<String, String> property : requestConfigProperties.entrySet()) {
            String propertyName = property.getKey();
            String propertyValue = property.getValue();
            if (propertiesTypes.containsKey(PropertyType.PASSWORD) &&
                propertiesTypes.get(PropertyType.PASSWORD).contains(propertyName)) {
              // de-reference SECRET placeholders back into the stored password value
              if (SecretReference.isSecret(propertyValue)) {
                SecretReference ref = new SecretReference(propertyValue, cluster);
                requestConfigProperties.put(propertyName, ref.getValue());
              }
            }
          }
        }

        Map<String,Map<String,String>> requestConfigAttributes = desiredConfig.getPropertiesAttributes();
        Config clusterConfig = cluster.getDesiredConfigByType(desiredConfig.getType());
        Map<String, String> clusterConfigProperties = null;
        Map<String,Map<String,String>> clusterConfigAttributes = null;
        if (clusterConfig != null) {
          clusterConfigProperties = clusterConfig.getProperties();
          clusterConfigAttributes = clusterConfig.getPropertiesAttributes();
          if
(!isAttributeMapsEqual(requestConfigAttributes, clusterConfigAttributes)){
            isConfigurationCreationNeeded = true;
            break;
          }
        } else {
          // no current desired config of this type: must create one
          isConfigurationCreationNeeded = true;
          break;
        }

        if (requestConfigProperties == null || requestConfigProperties.isEmpty()) {
          Config existingConfig = cluster.getConfig(desiredConfig.getType(), desiredConfig.getVersionTag());
          if (existingConfig != null) {
            if (!StringUtils.equals(existingConfig.getTag(), clusterConfig.getTag())) {
              isConfigurationCreationNeeded = true;
              break;
            }
          }
        }
        if (requestConfigProperties != null && clusterConfigProperties != null) {
          if (requestConfigProperties.size() != clusterConfigProperties.size()) {
            isConfigurationCreationNeeded = true;
            break;
          } else {
            if ( cluster.getServiceByConfigType(clusterConfig.getType()) != null && clusterConfig.getServiceConfigVersions().isEmpty() ) {
              //If there's no service config versions containing this config (except cluster configs), recreate it even if exactly equal
              LOG.warn("Existing desired config doesn't belong to any service config version, " +
                  "forcing config recreation, " +
                  "clusterName={}, type = {}, tag={}", cluster.getClusterName(), clusterConfig.getType(),
                  clusterConfig.getTag());
              isConfigurationCreationNeeded = true;
              break;
            }
            // property-by-property comparison; any difference forces recreation
            for (Entry<String, String> property : requestConfigProperties.entrySet()) {
              if (!StringUtils.equals(property.getValue(), clusterConfigProperties.get(property.getKey()))) {
                isConfigurationCreationNeeded = true;
                break;
              }
            }
          }
        }
      }
    }

    // set or create configuration mapping (and optionally create the map of properties)
    if (isConfigurationCreationNeeded) {
      List<ConfigurationRequest> desiredConfigs = request.getDesiredConfig();

      if (!desiredConfigs.isEmpty()) {
        Set<Config> configs = new HashSet<Config>();
        String note = null;

        for (ConfigurationRequest cr : desiredConfigs) {
          String configType = cr.getType();

          // If the config type is for a service, then allow a user with SERVICE_MODIFY_CONFIGS to
          // update, else ensure the user has CLUSTER_MODIFY_CONFIGS
          String
service = null;

          try {
            service = cluster.getServiceForConfigTypes(Collections.singleton(configType));
          } catch (IllegalArgumentException e) {
            // Ignore this since we may have hit a config type that spans multiple services. This may
            // happen in unit test cases but should not happen with later versions of stacks.
          }

          if(StringUtils.isEmpty(service)) {
            if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.CLUSTER_MODIFY_CONFIGS))) {
              throw new AuthorizationException("The authenticated user does not have authorization to modify cluster configurations");
            }
          } else {
            if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.SERVICE_MODIFY_CONFIGS))) {
              throw new AuthorizationException("The authenticated user does not have authorization to modify service configurations");
            }
          }

          if (null != cr.getProperties()) {
            // !!! empty property sets are supported, and need to be able to use
            // previously-defined configs (revert)
            Map<String, Config> all = cluster.getConfigsByType(configType);
            if (null == all ||                          // none set
                !all.containsKey(cr.getVersionTag()) || // tag not set
                cr.getProperties().size() > 0) {        // properties to set

              // Ensure the user is allowed to update all properties
              validateAuthorizationToUpdateServiceUsersAndGroups(cluster, cr);

              LOG.info(MessageFormat.format("Applying configuration with tag ''{0}'' to cluster ''{1}'' for configuration type {2}",
                  cr.getVersionTag(),
                  request.getClusterName(),
                  configType));

              cr.setClusterName(cluster.getClusterName());
              configurationResponses.add(createConfiguration(cr));
            }
          }
          note = cr.getServiceConfigVersionNote();
          configs.add(cluster.getConfig(configType, cr.getVersionTag()));
        }
        if (!configs.isEmpty()) {
          String authName = getAuthName();
          serviceConfigVersionResponse = cluster.addDesiredConfig(authName, configs, note);
          if (serviceConfigVersionResponse != null) {
            // audit each applied config change to the dedicated "configchange" logger
            Logger logger = LoggerFactory.getLogger("configchange");
            for (Config
config : configs) { logger.info("cluster '" + request.getClusterName() + "' " + "changed by: '" + authName + "'; " + "type='" + config.getType() + "' " + "tag='" + config.getTag() + "'"); } } } } } StackId currentVersion = cluster.getCurrentStackVersion(); StackId desiredVersion = cluster.getDesiredStackVersion(); // Set the current version value if its not already set if (currentVersion == null) { if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.CLUSTER_UPGRADE_DOWNGRADE_STACK))) { throw new AuthorizationException("The authenticated user does not have authorization to modify stack version"); } cluster.setCurrentStackVersion(desiredVersion); } // Stack Upgrade: unlike the workflow for creating a cluster, updating a cluster via the API will not // create any ClusterVersionEntity changes because those have to go through the Stack Upgrade process. boolean requiresHostListUpdate = request.getHostNames() != null && !request.getHostNames().isEmpty(); if (requiresHostListUpdate) { clusters.mapHostsToCluster( request.getHostNames(), request.getClusterName()); } // set the provisioning state of the cluster if (null != request.getProvisioningState()) { State oldProvisioningState = cluster.getProvisioningState(); State provisioningState = State.valueOf(request.getProvisioningState()); if (provisioningState != State.INIT && provisioningState != State.INSTALLED) { LOG.warn( "Invalid cluster provisioning state {} cannot be set on the cluster {}", provisioningState, request.getClusterName()); throw new IllegalArgumentException( "Invalid cluster provisioning state " + provisioningState + " cannot be set on cluster " + request.getClusterName()); } if (provisioningState != oldProvisioningState) { boolean isStateTransitionValid = State.isValidDesiredStateTransition( oldProvisioningState, provisioningState); if (!isStateTransitionValid) { LOG.warn( "Invalid cluster provisioning 2state {} cannot be set on the cluster {} 
because the current state is {}", provisioningState, request.getClusterName(), oldProvisioningState); throw new AmbariException("Invalid transition for" + " cluster provisioning state" + ", clusterName=" + cluster.getClusterName() + ", clusterId=" + cluster.getClusterId() + ", currentProvisioningState=" + oldProvisioningState + ", newProvisioningState=" + provisioningState); } } cluster.setProvisioningState(provisioningState); } if (null != request.getServiceConfigVersionRequest()) { if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.SERVICE_MODIFY_CONFIGS))) { throw new AuthorizationException("The authenticated user does not have authorization to modify service configurations"); } ServiceConfigVersionRequest serviceConfigVersionRequest = request.getServiceConfigVersionRequest(); if (StringUtils.isEmpty(serviceConfigVersionRequest.getServiceName()) || null == serviceConfigVersionRequest.getVersion()) { String msg = "Service name and version should be specified in service config version"; LOG.error(msg); throw new IllegalArgumentException(msg); } serviceConfigVersionResponse = cluster.setServiceConfigVersion(serviceConfigVersionRequest.getServiceName(), serviceConfigVersionRequest.getVersion(), getAuthName(), serviceConfigVersionRequest.getNote()); } if (serviceConfigVersionResponse != null) { if (!configurationResponses.isEmpty()) { serviceConfigVersionResponse.setConfigurations(configurationResponses); } ClusterResponse clusterResponse = new ClusterResponse(cluster.getClusterId(), cluster.getClusterName(), null, null, null, null, null, null); Map<String, Collection<ServiceConfigVersionResponse>> map = new HashMap<String, Collection<ServiceConfigVersionResponse>>(); map.put(serviceConfigVersionResponse.getServiceName(), Collections.singletonList(serviceConfigVersionResponse)); clusterResponse.setDesiredServiceConfigVersions(map); //workaround to be able to retrieve update results in resource provider 
//as this method only expected to return request response saveClusterUpdate(request, clusterResponse); } // set the new security type of the cluster if change is requested SecurityType securityType = request.getSecurityType(); if(securityType != null) { // if any custom operations are valid and requested, the process of executing them should be initiated, // most of the validation logic will be left to the KerberosHelper to avoid polluting the controller if (kerberosHelper.shouldExecuteCustomOperations(securityType, requestProperties)) { if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.CLUSTER_TOGGLE_KERBEROS))) { throw new AuthorizationException("The authenticated user does not have authorization to perform Kerberos-specific operations"); } try { requestStageContainer = kerberosHelper.executeCustomOperations(cluster, requestProperties, requestStageContainer, kerberosHelper.getManageIdentitiesDirective(requestProperties)); } catch (KerberosOperationException e) { throw new IllegalArgumentException(e.getMessage(), e); } } else { // If force_toggle_kerberos is not specified, null will be returned. Therefore, perform an // equals check to yield true if the result is Boolean.TRUE, otherwise false. 
boolean forceToggleKerberos = kerberosHelper.getForceToggleKerberosDirective(requestProperties); if (forceToggleKerberos || (cluster.getSecurityType() != securityType)) { LOG.info("Received cluster security type change request from {} to {} (forced: {})", cluster.getSecurityType().name(), securityType.name(), forceToggleKerberos); if ((securityType == SecurityType.KERBEROS) || (securityType == SecurityType.NONE)) { if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.CLUSTER_TOGGLE_KERBEROS))) { throw new AuthorizationException("The authenticated user does not have authorization to enable or disable Kerberos"); } // Since the security state of the cluster has changed, invoke toggleKerberos to handle // adding or removing Kerberos from the cluster. This may generate multiple stages // or not depending the current state of the cluster. try { requestStageContainer = kerberosHelper.toggleKerberos(cluster, securityType, requestStageContainer, kerberosHelper.getManageIdentitiesDirective(requestProperties)); } catch (KerberosOperationException e) { throw new IllegalArgumentException(e.getMessage(), e); } } else { throw new IllegalArgumentException(String.format("Unexpected security type encountered: %s", securityType.name())); } cluster.setSecurityType(securityType); } } } if (requestStageContainer != null) { requestStageContainer.persist(); return requestStageContainer.getRequestStatusResponse(); } else { return null; } } /** * Comparison of two attributes maps * @param requestConfigAttributes - attribute map sent from API * @param clusterConfigAttributes - existed attribute map * @return true if maps is equal (have the same attributes and their values) */ public boolean isAttributeMapsEqual(Map<String, Map<String, String>> requestConfigAttributes, Map<String, Map<String, String>> clusterConfigAttributes) { boolean isAttributesEqual = true; if ((requestConfigAttributes != null && clusterConfigAttributes == 
null) || (requestConfigAttributes == null && clusterConfigAttributes != null) || (requestConfigAttributes != null && clusterConfigAttributes != null && !requestConfigAttributes.keySet().equals(clusterConfigAttributes.keySet()))) { return false; } else if (clusterConfigAttributes != null && requestConfigAttributes != null) { for (Entry<String, Map<String, String>> ClusterEntrySet : clusterConfigAttributes.entrySet()) { Map<String, String> clusterMapAttributes = ClusterEntrySet.getValue(); Map<String, String> requestMapAttributes = requestConfigAttributes.get(ClusterEntrySet.getKey()); if ((requestMapAttributes != null && clusterMapAttributes == null) || (requestMapAttributes == null && clusterMapAttributes != null) || (requestMapAttributes != null && clusterMapAttributes != null && !requestMapAttributes.keySet().equals(clusterMapAttributes.keySet()))) { return false; } else if (requestMapAttributes != null && clusterMapAttributes != null) { for (Entry<String, String> requestPropertyEntrySet : requestMapAttributes.entrySet()) { String requestPropertyValue = requestPropertyEntrySet.getValue(); String clusterPropertyValue = clusterMapAttributes.get(requestPropertyEntrySet.getKey()); if ((requestPropertyValue != null && clusterPropertyValue == null) || (requestPropertyValue == null && clusterPropertyValue != null) || (requestPropertyValue != null && clusterPropertyValue != null && !requestPropertyValue.equals(clusterPropertyValue))) { return false; } } } } } return isAttributesEqual; } /** * Save cluster update results to retrieve later * @param clusterRequest cluster request info * @param clusterResponse cluster response info */ public void saveClusterUpdate(ClusterRequest clusterRequest, ClusterResponse clusterResponse) { clusterUpdateCache.put(clusterRequest, clusterResponse); } @Override public ClusterResponse getClusterUpdateResults(ClusterRequest clusterRequest) { return clusterUpdateCache.getIfPresent(clusterRequest); } @Override public String 
getJobTrackerHost(Cluster cluster) {
    try {
      Service svc = cluster.getService("MAPREDUCE");
      ServiceComponent sc = svc.getServiceComponent(Role.JOBTRACKER.toString());
      if (sc.getServiceComponentHosts() != null
          && !sc.getServiceComponentHosts().isEmpty()) {
        // Any assigned host will do; the iteration order of the host map decides
        // which one is returned.
        return sc.getServiceComponentHosts().keySet().iterator().next();
      }
    } catch (AmbariException ex) {
      // The MAPREDUCE service or the JOBTRACKER component is not defined on this
      // cluster -- treated the same as "no JobTracker host found".
      return null;
    }
    return null;
  }

  /**
   * Determines which services should get a smoke test (service check) scheduled
   * for the current request.
   *
   * @param cluster         the cluster being operated on
   * @param changedServices services grouped by their new desired state
   * @param changedScHosts  host components grouped by component name and new desired state
   * @param runSmokeTest    when false, no services are added from the change sets
   * @return the names of services to smoke test
   * @throws AmbariException on metainfo lookup failures
   */
  private Set<String> getServicesForSmokeTests(Cluster cluster,
             Map<State, List<Service>> changedServices,
             Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts,
             boolean runSmokeTest) throws AmbariException {

    // We choose the most general (high-level) op level here. As a result,
    // service checks will be only launched for services/components that
    // are not in a Maintenance state.
    Resource.Type opLvl = Resource.Type.Cluster;

    Set<String> smokeTestServices = new HashSet<String>();

    // Adding smoke checks for changed services
    if (changedServices != null) {
      for (Entry<State, List<Service>> entry : changedServices.entrySet()) {
        if (State.STARTED != entry.getKey()) {
          continue;
        }
        for (Service s : entry.getValue()) {
          if (runSmokeTest && (State.INSTALLED == s.getDesiredState()
              && maintenanceStateHelper.isOperationAllowed(opLvl, s))) {
            smokeTestServices.add(s.getName());
          }
        }
      }
    }

    // Adding smoke checks for changed host components.
    // Count, per service, how many distinct components are being started.
    Map<String, Map<String, Integer>> changedComponentCount =
        new HashMap<String, Map<String, Integer>>();
    for (Map<State, List<ServiceComponentHost>> stateScHostMap :
        changedScHosts.values()) {
      for (Entry<State, List<ServiceComponentHost>> entry :
          stateScHostMap.entrySet()) {
        if (State.STARTED != entry.getKey()) {
          continue;
        }
        for (ServiceComponentHost sch : entry.getValue()) {
          // Only components currently INSTALLED (i.e. about to transition to
          // STARTED) and not blocked by maintenance mode are counted.
          if (State.INSTALLED != sch.getState()) {
            continue;
          }
          if (!maintenanceStateHelper.isOperationAllowed(opLvl, sch)) {
            continue;
          }
          if (!changedComponentCount.containsKey(sch.getServiceName())) {
            changedComponentCount.put(sch.getServiceName(),
                new HashMap<String, Integer>());
          }
          if (!changedComponentCount.get(sch.getServiceName())
              .containsKey(sch.getServiceComponentName())) {
            changedComponentCount.get(sch.getServiceName())
                .put(sch.getServiceComponentName(), 1);
          } else {
            Integer i = changedComponentCount.get(sch.getServiceName())
                .get(sch.getServiceComponentName());
            changedComponentCount.get(sch.getServiceName())
                .put(sch.getServiceComponentName(), ++i);
          }
        }
      }
    }

    // Add service checks for any changed master component hosts or if
    // more then one component has been changed for a service
    for (Entry<String, Map<String, Integer>> entry :
        changedComponentCount.entrySet()) {
      String serviceName = entry.getKey();
      Service s = cluster.getService(serviceName);
      // smoke test service if more than one component is started
      if (runSmokeTest && (entry.getValue().size() > 1)
          && maintenanceStateHelper.isOperationAllowed(opLvl, s)) {
        smokeTestServices.add(serviceName);
        continue;
      }
      for (String componentName :
          changedComponentCount.get(serviceName).keySet()) {
        ServiceComponent sc = cluster.getService(serviceName).
            getServiceComponent(componentName);
        StackId stackId = sc.getDesiredStackVersion();
        ComponentInfo compInfo = ambariMetaInfo.getComponent(
            stackId.getStackName(), stackId.getStackVersion(), serviceName,
            componentName);
        if (runSmokeTest && compInfo.isMaster() &&
            // op lvl handling for service component
            // is the same as for service
            maintenanceStateHelper.isOperationAllowed(opLvl, s)) {
          smokeTestServices.add(serviceName);
        }
        // FIXME if master check if we need to run a smoke test for the master
      }
    }
    return smokeTestServices;
  }

  /**
   * Collects client-only host components of the affected services so the caller
   * can mark them for re-install, re-attaching changed configurations on
   * service restart.
   *
   * @param cluster         the cluster being operated on
   * @param changedServices services grouped by their new desired state; may be null
   * @param changedScHosts  host components grouped by component name and new
   *                        desired state; client components found here are added
   *                        to this map under the INSTALLED state
   * @throws AmbariException on cluster/host lookup failures
   */
  private void addClientSchForReinstall(Cluster cluster,
      Map<State, List<Service>> changedServices,
      Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts)
      throws AmbariException {

    Set<String> services = new HashSet<String>();

    // This is done to account for services with client only components.
    if (changedServices != null) {
      for (Entry<State, List<Service>> entry : changedServices.entrySet()) {
        if (State.STARTED != entry.getKey()) {
          continue;
        }
        for (Service s : entry.getValue()) {
          if (State.INSTALLED == s.getDesiredState()) {
            services.add(s.getName());
          }
        }
      }
    }

    // Flatten changed Schs that are going to be Started
    List<ServiceComponentHost> serviceComponentHosts =
        new ArrayList<ServiceComponentHost>();
    if (changedScHosts != null && !changedScHosts.isEmpty()) {
      for (Entry<String, Map<State, List<ServiceComponentHost>>> stringMapEntry :
          changedScHosts.entrySet()) {
        for (State state : stringMapEntry.getValue().keySet()) {
          if (state == State.STARTED) {
            serviceComponentHosts.addAll(stringMapEntry.getValue().get(state));
          }
        }
      }
    }

    if (!serviceComponentHosts.isEmpty()) {
      for (ServiceComponentHost sch : serviceComponentHosts) {
        services.add(sch.getServiceName());
      }
    }

    if (services.isEmpty()) {
      // Nothing is being started, so no clients need their configs re-attached.
      return;
    }

    Map<String, List<ServiceComponentHost>> clientSchs =
        new HashMap<String, List<ServiceComponentHost>>();

    for (String serviceName : services) {
      Service s = cluster.getService(serviceName);
      for (String component : s.getServiceComponents().keySet()) {
List<ServiceComponentHost> potentialHosts = new ArrayList<ServiceComponentHost>();
        ServiceComponent sc = s.getServiceComponents().get(component);
        if (sc.isClientComponent()) {
          for (ServiceComponentHost potentialSch : sc.getServiceComponentHosts().values()) {
            Host host = clusters.getHost(potentialSch.getHostName());
            // Host is alive and neither host nor SCH is in Maintenance State
            if (!potentialSch.getHostState().equals(HostState.HEARTBEAT_LOST)
                && potentialSch.getMaintenanceState() != MaintenanceState.ON
                && host.getMaintenanceState(cluster.getClusterId()) == MaintenanceState.OFF) {
              potentialHosts.add(potentialSch);
            }
          }
        }
        if (!potentialHosts.isEmpty()) {
          clientSchs.put(sc.getName(), potentialHosts);
        }
      }
    }
    LOG.info("Client hosts for reinstall : " + clientSchs.size());

    // Feed the collected client components back to the caller's change map under
    // the INSTALLED state so they get re-installed (and pick up new configs).
    if (changedScHosts != null) {
      for (Entry<String, List<ServiceComponentHost>> stringListEntry : clientSchs.entrySet()) {
        Map<State, List<ServiceComponentHost>> schMap =
            new EnumMap<State, List<ServiceComponentHost>>(State.class);
        schMap.put(State.INSTALLED, stringListEntry.getValue());
        changedScHosts.put(stringListEntry.getKey(), schMap);
      }
    }
  }

  /**
   * Resolves the effective desired configuration tags for a host, including any
   * host-level overrides, by delegating to the config helper.
   */
  @Override
  public Map<String, Map<String,String>> findConfigurationTagsWithOverrides(
          Cluster cluster, String hostName) throws AmbariException {
    return configHelper.getEffectiveDesiredTags(cluster, hostName);
  }

  @Override
  public RequestExecutionFactory getRequestExecutionFactory() {
    return requestExecutionFactory;
  }

  @Override
  public ExecutionScheduleManager getExecutionScheduleManager() {
    return executionScheduleManager;
  }

  /**
   * Creates and populates an EXECUTION_COMMAND for host.
   *
   * Adds a host-role execution command to the given stage and fills in its
   * configurations, command parameters (script, timeouts, retry policy),
   * host-level parameters (repo info, package list, users/groups, DB driver)
   * and role parameters.
   */
  private void createHostAction(Cluster cluster,
                                Stage stage,
                                ServiceComponentHost scHost,
                                Map<String, Map<String, String>> configurations,
                                Map<String, Map<String, Map<String, String>>> configurationAttributes,
                                Map<String, Map<String, String>> configTags,
                                RoleCommand roleCommand,
                                Map<String, String> commandParamsInp,
                                ServiceComponentHostEvent event
                                )
                                throws AmbariException {

    String serviceName = scHost.getServiceName();

    stage.addHostRoleExecutionCommand(scHost.getHost(),
        Role.valueOf(scHost.getServiceComponentName()), roleCommand, event,
        cluster, serviceName, false, false);

    String componentName = scHost.getServiceComponentName();
    String hostname = scHost.getHostName();
    String osFamily = clusters.getHost(hostname).getOsFamily();
    StackId stackId = cluster.getDesiredStackVersion();
    ServiceInfo serviceInfo = ambariMetaInfo.getService(stackId.getStackName(),
        stackId.getStackVersion(), serviceName);
    ComponentInfo componentInfo = ambariMetaInfo.getComponent(
        stackId.getStackName(), stackId.getStackVersion(),
        serviceName, componentName);
    StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
        stackId.getStackVersion());

    ExecutionCommand execCmd = stage.getExecutionCommandWrapper(scHost.getHostName(),
        scHost.getServiceComponentName()).getExecutionCommand();

    Host host = clusters.getHost(scHost.getHostName());

    execCmd.setConfigurations(configurations);
    execCmd.setConfigurationAttributes(configurationAttributes);
    execCmd.setConfigurationTags(configTags);

    // Create a local copy for each command
    Map<String, String> commandParams = new TreeMap<String, String>();
    if (commandParamsInp != null) { // if not defined
      commandParams.putAll(commandParamsInp);
    }

    //Propogate HCFS service type info
    // NOTE(review): only the first service reporting a non-null service type is
    // propagated as "dfs_type" -- the loop breaks after the first match.
    Iterator<Service> it = cluster.getServices().values().iterator();
    while(it.hasNext()) {
      ServiceInfo serviceInfoInstance = ambariMetaInfo.getService(stackId.getStackName(),stackId.getStackVersion(), it.next().getName());
      LOG.info("Iterating service type Instance in createHostAction:: " + serviceInfoInstance.getName());
      if(serviceInfoInstance.getServiceType() != null) {
        LOG.info("Adding service type info in createHostAction:: " + serviceInfoInstance.getServiceType());
        commandParams.put("dfs_type",serviceInfoInstance.getServiceType());
        break;
      }
    }

    boolean isInstallCommand = roleCommand.equals(RoleCommand.INSTALL);
    String agentDefaultCommandTimeout = configs.getDefaultAgentTaskTimeout(isInstallCommand);
    String scriptCommandTimeout = "";
    /*
     * This script is only used for
     * default commands like INSTALL/STOP/START
     */
    CommandScriptDefinition script = componentInfo.getCommandScript();
    if (serviceInfo.getSchemaVersion().equals(AmbariMetaInfo.SCHEMA_VERSION_2)) {
      if (script != null) {
        commandParams.put(SCRIPT, script.getScript());
        commandParams.put(SCRIPT_TYPE, script.getScriptType().toString());

        // Auto-retry is only considered during the initial install/start phases
        // of cluster provisioning, driven by cluster-env settings.
        boolean retryEnabled = false;
        Integer retryMaxTime = 0;
        if (commandParams.containsKey(CLUSTER_PHASE_PROPERTY) &&
            (commandParams.get(CLUSTER_PHASE_PROPERTY).equals(CLUSTER_PHASE_INITIAL_INSTALL) ||
            commandParams.get(CLUSTER_PHASE_PROPERTY).equals(CLUSTER_PHASE_INITIAL_START))) {
          String retryEnabledStr =
              configHelper.getValueFromDesiredConfigurations(cluster, ConfigHelper.CLUSTER_ENV,
                  ConfigHelper.CLUSTER_ENV_RETRY_ENABLED);
          String commandsStr =
              configHelper.getValueFromDesiredConfigurations(cluster, ConfigHelper.CLUSTER_ENV,
                  ConfigHelper.CLUSTER_ENV_RETRY_COMMANDS);
          String retryMaxTimeStr =
              configHelper.getValueFromDesiredConfigurations(cluster, ConfigHelper.CLUSTER_ENV,
                  ConfigHelper.CLUSTER_ENV_RETRY_MAX_TIME_IN_SEC);
          if (StringUtils.isNotEmpty(retryEnabledStr)) {
            retryEnabled = Boolean.TRUE.toString().equals(retryEnabledStr);
          }

          if (retryEnabled) {
            retryMaxTime = NumberUtils.toInt(retryMaxTimeStr, 0);
            if (retryMaxTime < 0) {
              // Negative configured values are clamped to "no retry window".
              retryMaxTime = 0;
            }

            // When a command whitelist is configured, retry only applies to the
            // listed role commands.
            if (StringUtils.isNotEmpty(commandsStr)) {
              boolean commandMayBeRetried = false;
              String[] commands = commandsStr.split(",");
              for (String command : commands) {
                if (roleCommand.toString().equals(command.trim())) {
                  commandMayBeRetried = true;
                }
              }
              retryEnabled = commandMayBeRetried;
            }
          }
          LOG.info("Auto retry setting for {}-{} on {} is retryEnabled={} and retryMaxTime={}", serviceName, componentName, scHost.getHostName(), retryEnabled, retryMaxTime);
        }
        commandParams.put(MAX_DURATION_OF_RETRIES, Integer.toString(retryMaxTime));
        commandParams.put(COMMAND_RETRY_ENABLED, Boolean.toString(retryEnabled));

        ClusterVersionEntity effectiveClusterVersion = cluster.getEffectiveClusterVersion();
        if (effectiveClusterVersion != null) {
          commandParams.put(VERSION, effectiveClusterVersion.getRepositoryVersion().getVersion());
        }
        if (script.getTimeout() > 0) {
          scriptCommandTimeout = String.valueOf(script.getTimeout());
        }
      } else {
        String message = String.format("Component %s of service %s has no " +
            "command script defined", componentName, serviceName);
        throw new AmbariException(message);
      }
    }

    String actualTimeout = (!scriptCommandTimeout.equals("") ? scriptCommandTimeout : agentDefaultCommandTimeout);

    // Because the INSTALL command can take much longer than typical commands, set the timeout to be the max
    // between the script's service component timeout and the agent default timeout.
    if (roleCommand.equals(RoleCommand.INSTALL) && !agentDefaultCommandTimeout.equals("") &&
        Integer.parseInt(actualTimeout) < Integer.parseInt(agentDefaultCommandTimeout)) {
      actualTimeout = agentDefaultCommandTimeout;
    }

    commandParams.put(COMMAND_TIMEOUT, actualTimeout);
    commandParams.put(SERVICE_PACKAGE_FOLDER,
        serviceInfo.getServicePackageFolder());
    commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());

    String clusterName = cluster.getClusterName();
    if (customCommandExecutionHelper.isTopologyRefreshRequired(roleCommand.name(), clusterName, serviceName)) {
      commandParams.put(ExecutionCommand.KeyNames.REFRESH_TOPOLOGY, "True");
    }

    execCmd.setCommandParams(commandParams);

    String repoInfo = customCommandExecutionHelper.getRepoInfo(cluster, host);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Sending repo information to agent"
          + ", hostname=" + scHost.getHostName()
          + ", clusterName=" + clusterName
          + ", stackInfo=" + stackId.getStackId()
          + ", repoInfo=" + repoInfo);
    }

    Map<String, String> hostParams = new TreeMap<String, String>();
    hostParams.put(REPO_INFO, repoInfo);
    hostParams.putAll(getRcaParameters());

    // use the effective cluster version here since this command might happen
    // in the context of an upgrade and we should send the repo ID which matches
    // the version being send down
    RepositoryVersionEntity repoVersion = null;
    ClusterVersionEntity effectiveClusterVersion = cluster.getEffectiveClusterVersion();
    if (null != effectiveClusterVersion) {
      repoVersion = effectiveClusterVersion.getRepositoryVersion();
    } else {
      // No effective version yet -- fall back to the single INIT version, if any.
      List<ClusterVersionEntity> list = clusterVersionDAO.findByClusterAndState(cluster.getClusterName(),
          RepositoryVersionState.INIT);
      if (1 == list.size()) {
        repoVersion = list.get(0).getRepositoryVersion();
      }
    }

    if (null != repoVersion) {
      try {
        VersionDefinitionXml xml = repoVersion.getRepositoryXml();
        if (null != xml && !StringUtils.isBlank(xml.release.packageVersion)) {
          hostParams.put(PACKAGE_VERSION, xml.release.packageVersion);
        }
      } catch (Exception e) {
        throw new AmbariException(String.format("Could not load version xml from repo version %s",
            repoVersion.getVersion()), e);
      }

      hostParams.put(KeyNames.REPO_VERSION_ID, repoVersion.getId().toString());
    }

    List<ServiceOsSpecific.Package> packages =
        getPackagesForServiceHost(serviceInfo, hostParams, osFamily);
    String packageList = gson.toJson(packages);
    hostParams.put(PACKAGE_LIST, packageList);

    Set<String> userSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.USER, cluster);
    String userList = gson.toJson(userSet);
    hostParams.put(USER_LIST, userList);

    Set<String> groupSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.GROUP, cluster);
    String groupList = gson.toJson(groupSet);
    hostParams.put(GROUP_LIST, groupList);

    Set<String> notManagedHdfsPathSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.NOT_MANAGED_HDFS_PATH, cluster);
    String notManagedHdfsPathList = gson.toJson(notManagedHdfsPathSet);
    hostParams.put(NOT_MANAGED_HDFS_PATH_LIST, notManagedHdfsPathList);

    // Ship the JDBC driver file name matching the server's configured database.
    DatabaseType databaseType = configs.getDatabaseType();
    if (databaseType == DatabaseType.ORACLE) {
      hostParams.put(DB_DRIVER_FILENAME, configs.getOjdbcJarName());
    } else if (databaseType == DatabaseType.MYSQL) {
      hostParams.put(DB_DRIVER_FILENAME, configs.getMySQLJarName());
    }

    List<String> clientsToUpdateConfigsList = componentInfo.getClientsToUpdateConfigs();
    if (clientsToUpdateConfigsList == null) {
      // Default: every client should refresh its configs.
      clientsToUpdateConfigsList = new ArrayList<String>();
      clientsToUpdateConfigsList.add("*");
    }
    String clientsToUpdateConfigs = gson.toJson(clientsToUpdateConfigsList);
    hostParams.put(CLIENTS_TO_UPDATE_CONFIGS, clientsToUpdateConfigs);
    execCmd.setHostLevelParams(hostParams);

    Map<String, String> roleParams = new TreeMap<String, String>();

    // !!! consistent with where custom commands put variables
    // !!! after-INSTALL hook checks this such that the stack selection tool won't
    // select-all to a version that is not being upgraded, breaking RU
    if (cluster.isUpgradeSuspended()) {
      roleParams.put(KeyNames.UPGRADE_SUSPENDED, Boolean.TRUE.toString().toLowerCase());
    }
    execCmd.setRoleParams(roleParams);

    // NOTE(review): execCmd cannot be null here (it was dereferenced above);
    // the null check is purely defensive.
    if ((execCmd != null) && (execCmd.getConfigurationTags().containsKey("cluster-env"))) {
      LOG.info("AmbariManagementControllerImpl.createHostAction: created ExecutionCommand for host {}, role {}, roleCommand {}, and command ID {}, with cluster-env tags {}",
          execCmd.getHostname(), execCmd.getRole(), execCmd.getRoleCommand(), execCmd.getCommandId(), execCmd.getConfigurationTags().get("cluster-env").get("tag"));
    }
  }

  /**
   * Computes os-dependent packages for service/host. Does not take into
   * account package dependencies for ANY_OS. Instead of this method
   * you should use getPackagesForServiceHost()
   * because it takes into account both os-dependent and os-independent lists
   * of packages for service.
* @param hostParams may be modified (appended SERVICE_REPO_INFO)
   * @return a list of os-dependent packages for host
   */
  protected ServiceOsSpecific populateServicePackagesInfo(ServiceInfo serviceInfo, Map<String, String> hostParams,
                                                          String osFamily) {
    ServiceOsSpecific hostOs = new ServiceOsSpecific(osFamily);
    List<ServiceOsSpecific> foundOSSpecifics = getOSSpecificsByFamily(serviceInfo.getOsSpecifics(), osFamily);
    if (!foundOSSpecifics.isEmpty()) {
      for (ServiceOsSpecific osSpecific : foundOSSpecifics) {
        hostOs.addPackages(osSpecific.getPackages());
      }
      // Choose repo that is relevant for host
      ServiceOsSpecific.Repo serviceRepo = hostOs.getRepo();
      if (serviceRepo != null) {
        String serviceRepoInfo = gson.toJson(serviceRepo);
        // Side effect: expose the service-level repo to the caller via hostParams.
        hostParams.put(SERVICE_REPO_INFO, serviceRepoInfo);
      }
    }
    return hostOs;
  }

  /**
   * Builds the full package list for a service on a host: the os-independent
   * (ANY_OS) packages plus the os-dependent ones resolved by
   * populateServicePackagesInfo().
   */
  @Override
  public List<ServiceOsSpecific.Package> getPackagesForServiceHost(ServiceInfo serviceInfo, Map<String, String> hostParams, String osFamily) {
    // Write down os specific info for the service
    ServiceOsSpecific anyOs = null;
    if (serviceInfo.getOsSpecifics().containsKey(AmbariMetaInfo.ANY_OS)) {
      anyOs = serviceInfo.getOsSpecifics().get(AmbariMetaInfo.ANY_OS);
    }

    ServiceOsSpecific hostOs = populateServicePackagesInfo(serviceInfo, hostParams, osFamily);

    // Build package list that is relevant for host
    List<ServiceOsSpecific.Package> packages =
        new ArrayList<ServiceOsSpecific.Package>();
    if (anyOs != null) {
      packages.addAll(anyOs.getPackages());
    }
    // NOTE(review): populateServicePackagesInfo() never returns null, so this
    // null check is purely defensive.
    if (hostOs != null) {
      packages.addAll(hostOs.getPackages());
    }

    return packages;
  }

  private List<ServiceOsSpecific> getOSSpecificsByFamily(Map<String, ServiceOsSpecific> osSpecifics, String osFamily) {
    List<ServiceOsSpecific> foundedOSSpecifics = new ArrayList<ServiceOsSpecific>();
    for (Entry<String, ServiceOsSpecific> osSpecific : osSpecifics.entrySet()) {
      // NOTE(review): substring matching -- presumably the key is a
      // (comma-separated) list of os family names that contains osFamily. A
      // family name that is a substring of another (e.g. "suse" in "opensuse")
      // would also match; confirm the key format before tightening this.
      if (osSpecific.getKey().contains(osFamily)) {
        foundedOSSpecifics.add(osSpecific.getValue());
      }
    }

    return foundedOSSpecifics;
  }

  /**
   * Translates an ExecuteActionRequest into an ActionExecutionContext, either
   * for a (custom) command or for a defined action.
   *
   * @throws AmbariException if the named action is not defined
   */
  private ActionExecutionContext getActionExecutionContext
      (ExecuteActionRequest actionRequest) throws AmbariException {
    RequestOperationLevel operationLevel = actionRequest.getOperationLevel();
    if (actionRequest.isCommand()) {
      ActionExecutionContext actionExecutionContext =
          new ActionExecutionContext(actionRequest.getClusterName(),
              actionRequest.getCommandName(), actionRequest.getResourceFilters(),
              actionRequest.getParameters());
      actionExecutionContext.setOperationLevel(operationLevel);
      return actionExecutionContext;
    } else { // If action
      ActionDefinition actionDef =
          ambariMetaInfo.getActionDefinition(actionRequest.getActionName());

      if (actionDef == null) {
        throw new AmbariException(
            "Action " + actionRequest.getActionName() + " does not exist");
      }

      ActionExecutionContext actionExecutionContext =
          new ActionExecutionContext(actionRequest.getClusterName(),
              actionRequest.getActionName(), actionRequest.getResourceFilters(),
              actionRequest.getParameters(), actionDef.getTargetType(),
              actionDef.getDefaultTimeout(), actionDef.getTargetService(),
              actionDef.getTargetComponent());
      actionExecutionContext.setOperationLevel(operationLevel);
      return actionExecutionContext;
    }
  }

  private RequestStageContainer doStageCreation(RequestStageContainer requestStages,
      Cluster cluster,
      Map<State, List<Service>> changedServices,
      Map<State, List<ServiceComponent>> changedComps,
      Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts,
      Map<String, String> requestParameters,
      Map<String, String> requestProperties,
      boolean runSmokeTest,
      boolean reconfigureClients)
      throws AmbariException {

    // TODO handle different transitions?
    // Say HDFS to stopped and MR to started, what order should actions be done
    // in?

    // TODO additional validation?
    // verify all configs
    // verify all required components

    // Nothing changed => nothing to stage.
    if ((changedServices == null || changedServices.isEmpty())
        && (changedComps == null || changedComps.isEmpty())
        && (changedScHosts == null || changedScHosts.isEmpty())) {
      LOG.debug("Created 0 stages");
      return requestStages;
    }

    // smoke test any service that goes from installed to started
    Set<String> smokeTestServices = getServicesForSmokeTests(cluster,
        changedServices, changedScHosts, runSmokeTest);

    if (reconfigureClients) {
      // Re-install client only hosts to reattach changed configs on service restart
      addClientSchForReinstall(cluster, changedServices, changedScHosts);
    }

    if (!changedScHosts.isEmpty()
        || !smokeTestServices.isEmpty()) {
      long nowTimestamp = System.currentTimeMillis();

      // FIXME cannot work with a single stage
      // multiple stages may be needed for reconfigure
      Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);

      String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
      String hostParamsJson = StageUtils.getGson().toJson(
          customCommandExecutionHelper.createDefaultHostParams(cluster));

      Stage stage = createNewStage(requestStages.getLastStageId(), cluster,
          requestStages.getId(), requestProperties.get(REQUEST_CONTEXT_PROPERTY),
          clusterHostInfoJson, "{}", hostParamsJson);

      // Host components that transition INIT/INSTALL_FAILED -> INSTALLED on a
      // Kerberized cluster; collected so Kerberos identities can be ensured later.
      Collection<ServiceComponentHost> componentsToEnableKerberos = new ArrayList<ServiceComponentHost>();
      Set<String> hostsToForceKerberosOperations = new HashSet<String>();

      // component name -> desired state -> affected host components
      for (String compName : changedScHosts.keySet()) {
        for (State newState : changedScHosts.get(compName).keySet()) {
          for (ServiceComponentHost scHost :
              changedScHosts.get(compName).get(newState)) {

            // Do not create role command for hosts that are not responding
            if (scHost.getHostState().equals(HostState.HEARTBEAT_LOST)) {
              LOG.info("Command is not created for servicecomponenthost "
                  + ", clusterName=" + cluster.getClusterName()
                  + ", clusterId=" + cluster.getClusterId()
                  + ", serviceName=" + scHost.getServiceName()
                  + ", componentName=" + scHost.getServiceComponentName()
                  + ", hostname=" + scHost.getHostName()
                  + ", hostState=" + scHost.getHostState()
                  + ", targetNewState=" + newState);
              continue;
            }

            RoleCommand roleCommand;
            State oldSchState = scHost.getState();
            ServiceComponentHostEvent event;

            // Map (current state, desired state) to the role command and
            // lifecycle event to schedule.
            switch (newState) {
              case INSTALLED:
                if (oldSchState == State.INIT
                    || oldSchState == State.UNINSTALLED
                    || oldSchState == State.INSTALLED
                    || oldSchState == State.INSTALLING
                    || oldSchState == State.UNKNOWN
                    || oldSchState == State.INSTALL_FAILED) {
                  roleCommand = RoleCommand.INSTALL;
                  event = new ServiceComponentHostInstallEvent(
                      scHost.getServiceComponentName(), scHost.getHostName(),
                      nowTimestamp,
                      scHost.getDesiredStackVersion().getStackId());

                  // If the state is transitioning from INIT TO INSTALLED and the cluster has Kerberos
                  // enabled, mark this ServiceComponentHost to see if anything needs to be done to
                  // make sure it is properly configured.
                  // If the component is transitioning from an INSTALL_FAILED to an INSTALLED state
                  // it indicates a failed install attempt followed by a new installation attempt, and
                  // it will also need Kerberos related configuration addressing.
                  // The Kerberos-related stages need to be
                  // between the INSTALLED and STARTED states because some services need to set up
                  // the host (i.e. create user accounts, etc...) before Kerberos-related tasks can
                  // occur (like distribute keytabs)
                  if((oldSchState == State.INIT || oldSchState == State.INSTALL_FAILED) &&
                      kerberosHelper.isClusterKerberosEnabled(cluster)) {
                    // check if host component already exists, if it exists no need to reset kerberos configs
                    // check if it's blueprint install. If it is, then do not call kerberos.configureService
                    if (!hostComponentAlreadyExists(cluster, scHost) && !("INITIAL_INSTALL".equals(requestProperties.get("phase")))) {
                      try {
                        kerberosHelper.configureService(cluster, scHost);
                      } catch (KerberosInvalidConfigurationException e) {
                        throw new AmbariException(e.getMessage(), e);
                      }
                    }
                    componentsToEnableKerberos.add(scHost);

                    if(Service.Type.KERBEROS.name().equalsIgnoreCase(scHost.getServiceName()) &&
                        Role.KERBEROS_CLIENT.name().equalsIgnoreCase(scHost.getServiceComponentName())) {
                      // Since the KERBEROS/KERBEROS_CLIENT is about to be moved from the INIT to the
                      // INSTALLED state (and it should be by the time the stages (in this request)
                      // that need it are executed), collect the relevant hostname to make sure the
                      // Kerberos logic does not skip operations for it.
                      hostsToForceKerberosOperations.add(scHost.getHostName());
                    }
                  }
                } else if (oldSchState == State.STARTED
                    // TODO: oldSchState == State.INSTALLED is always false, looks like a bug
                    //|| oldSchState == State.INSTALLED
                    || oldSchState == State.STOPPING) {
                  roleCommand = RoleCommand.STOP;
                  event = new ServiceComponentHostStopEvent(
                      scHost.getServiceComponentName(), scHost.getHostName(),
                      nowTimestamp);
                } else if (oldSchState == State.UPGRADING) {
                  roleCommand = RoleCommand.UPGRADE;
                  event = new ServiceComponentHostUpgradeEvent(
                      scHost.getServiceComponentName(), scHost.getHostName(),
                      nowTimestamp, scHost.getDesiredStackVersion().getStackId());
                } else {
                  throw new AmbariException("Invalid transition for"
                      + " servicecomponenthost"
                      + ", clusterName=" + cluster.getClusterName()
                      + ", clusterId=" + cluster.getClusterId()
                      + ", serviceName=" + scHost.getServiceName()
                      + ", componentName=" + scHost.getServiceComponentName()
                      + ", hostname=" + scHost.getHostName()
                      + ", currentState=" + oldSchState
                      + ", newDesiredState=" + newState);
                }
                break;
              case STARTED:
                StackId stackId = scHost.getDesiredStackVersion();
                ComponentInfo compInfo = ambariMetaInfo.getComponent(
                    stackId.getStackName(),
                    stackId.getStackVersion(), scHost.getServiceName(),
                    scHost.getServiceComponentName());

                // NOTE(review): the "|| true" below deliberately disables the
                // precondition check (see the todo comments); every component is
                // currently allowed to START regardless of its projected state.
                if (oldSchState == State.INSTALLED
                    || oldSchState == State.STARTING
                    //todo: after separating install and start, the install stage is no longer in request stage container
                    //todo: so projected state will not equal INSTALLED which causes an exception for invalid state transition
                    //todo: so for now disabling this check
                    //todo: this change breaks test AmbariManagementControllerTest.testServiceComponentHostUpdateRecursive()
                    || true) {
                  // requestStages.getProjectedState(scHost.getHostName(),
                  // scHost.getServiceComponentName()) == State.INSTALLED) {
                  roleCommand = RoleCommand.START;
                  event = new ServiceComponentHostStartEvent(
                      scHost.getServiceComponentName(), scHost.getHostName(),
                      nowTimestamp);
                } else {
                  String error = "Invalid transition for"
                      + " servicecomponenthost"
                      + ", clusterName=" + cluster.getClusterName()
                      + ", clusterId=" + cluster.getClusterId()
                      + ", serviceName=" + scHost.getServiceName()
                      + ", componentName=" + scHost.getServiceComponentName()
                      + ", hostname=" + scHost.getHostName()
                      + ", currentState=" + oldSchState
                      + ", newDesiredState=" + newState;
                  // Masters must transition cleanly; for non-masters the bad
                  // transition is logged and the host component is skipped.
                  if (compInfo.isMaster()) {
                    throw new AmbariException(error);
                  } else {
                    LOG.info("Ignoring: " + error);
                    continue;
                  }
                }
                break;
              case UNINSTALLED:
                if (oldSchState == State.INSTALLED
                    || oldSchState == State.UNINSTALLING) {
                  roleCommand = RoleCommand.UNINSTALL;
                  // NOTE(review): a ServiceComponentHostStartEvent is raised for
                  // an UNINSTALL command here — looks like a copy/paste bug; confirm.
                  event = new ServiceComponentHostStartEvent(
                      scHost.getServiceComponentName(), scHost.getHostName(),
                      nowTimestamp);
                } else {
                  throw new AmbariException("Invalid transition for"
                      + " servicecomponenthost"
                      + ", clusterName=" + cluster.getClusterName()
                      + ", clusterId=" + cluster.getClusterId()
                      + ", serviceName=" + scHost.getServiceName()
                      + ", componentName=" + scHost.getServiceComponentName()
                      + ", hostname=" + scHost.getHostName()
                      + ", currentState=" + oldSchState
                      + ", newDesiredState=" + newState);
                }
                break;
              case INIT:
                throw new AmbariException("Unsupported transition to INIT for"
                    + " servicecomponenthost"
                    + ", clusterName=" + cluster.getClusterName()
                    + ", clusterId=" + cluster.getClusterId()
                    + ", serviceName=" + scHost.getServiceName()
                    + ", componentName=" + scHost.getServiceComponentName()
                    + ", hostname=" + scHost.getHostName()
                    + ", currentState=" + oldSchState
                    + ", newDesiredState=" + newState);
              default:
                throw new AmbariException("Unsupported state change operation"
                    + ", newState=" + newState.toString());
            }

            if (LOG.isDebugEnabled()) {
              LOG.debug("Create a new host action"
                  + ", requestId=" + requestStages.getId()
                  + ", componentName=" + scHost.getServiceComponentName()
                  + ", hostname=" + scHost.getHostName()
                  + ", roleCommand=" + roleCommand.name());
            }

            // any targeted information
            String keyName = scHost.getServiceComponentName().toLowerCase();
            if (requestProperties.containsKey(keyName)) {
              // in the case where the command is targeted, but the states
              // of the old and new are the same, the targeted component
              // may still need to get the command.  This is true for Flume.
              if (oldSchState == newState) {
                switch (oldSchState) {
                  case INSTALLED:
                    roleCommand = RoleCommand.STOP;
                    event = new ServiceComponentHostStopEvent(
                        scHost.getServiceComponentName(), scHost.getHostName(),
                        nowTimestamp);
                    break;
                  case STARTED:
                    roleCommand = RoleCommand.START;
                    event = new ServiceComponentHostStartEvent(
                        scHost.getServiceComponentName(), scHost.getHostName(),
                        nowTimestamp);
                    break;
                  default:
                    break;
                }
              }

              if (null == requestParameters) {
                requestParameters = new HashMap<String, String>();
              }
              requestParameters.put(keyName, requestProperties.get(keyName));
            }

            // Propagate the cluster provisioning phase to the command parameters.
            if (requestProperties.containsKey(CLUSTER_PHASE_PROPERTY)) {
              if (null == requestParameters) {
                requestParameters = new HashMap<String, String>();
              }
              requestParameters.put(CLUSTER_PHASE_PROPERTY, requestProperties.get(CLUSTER_PHASE_PROPERTY));
            }

            Map<String, Map<String, String>> configurations = new TreeMap<String, Map<String, String>>();
            Map<String, Map<String, Map<String, String>>> configurationAttributes = new TreeMap<String,
Map<String, Map<String, String>>>(); Host host = clusters.getHost(scHost.getHostName()); Map<String, Map<String, String>> configTags = findConfigurationTagsWithOverrides(cluster, host.getHostName()); createHostAction(cluster, stage, scHost, configurations, configurationAttributes, configTags, roleCommand, requestParameters, event); } } } for (String serviceName : smokeTestServices) { // Creates smoke test commands Service s = cluster.getService(serviceName); // find service component host ServiceComponent component = getClientComponentForRunningAction(cluster, s); String componentName = component != null ? component.getName() : null; String clientHost = getClientHostForRunningAction(cluster, s, component); String smokeTestRole = actionMetadata.getServiceCheckAction(serviceName); if (clientHost == null || smokeTestRole == null) { LOG.info("Nothing to do for service check as could not find role or" + " or host to run check on" + ", clusterName=" + cluster.getClusterName() + ", serviceName=" + serviceName + ", clientHost=" + clientHost + ", serviceCheckRole=" + smokeTestRole); continue; } customCommandExecutionHelper.addServiceCheckAction(stage, clientHost, smokeTestRole, nowTimestamp, serviceName, componentName, null, false, false); } RoleCommandOrder rco = getRoleCommandOrder(cluster); RoleGraph rg = roleGraphFactory.createNew(rco); rg.build(stage); requestStages.addStages(rg.getStages()); if (!componentsToEnableKerberos.isEmpty()) { Map<String, Collection<String>> serviceFilter = new HashMap<String, Collection<String>>(); Set<String> hostFilter = new HashSet<String>(); for (ServiceComponentHost scHost : componentsToEnableKerberos) { String serviceName = scHost.getServiceName(); Collection<String> componentFilter = serviceFilter.get(serviceName); if (componentFilter == null) { componentFilter = new HashSet<String>(); serviceFilter.put(serviceName, componentFilter); } componentFilter.add(scHost.getServiceComponentName()); hostFilter.add(scHost.getHostName()); } try { 
kerberosHelper.ensureIdentities(cluster, serviceFilter, hostFilter, null, hostsToForceKerberosOperations, requestStages, kerberosHelper.getManageIdentitiesDirective(requestProperties)); } catch (KerberosOperationException e) { throw new IllegalArgumentException(e.getMessage(), e); } } List<Stage> stages = requestStages.getStages(); LOG.debug("Created {} stages", ((stages != null) ? stages.size() : 0)); } else { LOG.debug("Created 0 stages"); } return requestStages; } private boolean hostComponentAlreadyExists(Cluster cluster, ServiceComponentHost sch) throws AmbariException { Service service = cluster.getService(sch.getServiceName()); if (service != null) { ServiceComponent serviceComponent = service.getServiceComponent(sch.getServiceComponentName()); if (serviceComponent != null) { Map<String, ServiceComponentHost> serviceComponentHostMap = serviceComponent.getServiceComponentHosts(); for (ServiceComponentHost serviceComponentHost : serviceComponentHostMap.values()) { if (serviceComponentHost.getState() == State.INSTALLED || serviceComponentHost.getState() == State.STARTED) { return true; } } } } return false; } @Override public ExecutionCommand getExecutionCommand(Cluster cluster, ServiceComponentHost scHost, RoleCommand roleCommand) throws AmbariException { Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster); String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo); Map<String, String> hostParamsCmd = customCommandExecutionHelper.createDefaultHostParams(cluster); Stage stage = createNewStage(0, cluster, 1, "", clusterHostInfoJson, "{}", ""); Map<String, Map<String, String>> configTags = configHelper.getEffectiveDesiredTags(cluster, scHost.getHostName()); Map<String, Map<String, String>> configurations = configHelper.getEffectiveConfigProperties(cluster, configTags); Map<String, Map<String, Map<String, String>>> configurationAttributes = new TreeMap<String, Map<String, Map<String, String>>>(); 
createHostAction(cluster, stage, scHost, configurations, configurationAttributes, configTags, roleCommand, null, null); ExecutionCommand ec = stage.getExecutionCommands().get(scHost.getHostName()).get(0).getExecutionCommand(); // createHostAction does not take a hostLevelParams but creates one hostParamsCmd.putAll(ec.getHostLevelParams()); ec.getHostLevelParams().putAll(hostParamsCmd); ec.setClusterHostInfo( StageUtils.getClusterHostInfo(cluster)); if (null != cluster) { // Generate localComponents for (ServiceComponentHost sch : cluster.getServiceComponentHosts(scHost.getHostName())) { ec.getLocalComponents().add(sch.getServiceComponentName()); } } // Hack - Remove passwords from configs if ((ec.getRole().equals(Role.HIVE_CLIENT.toString()) || ec.getRole().equals(Role.WEBHCAT_SERVER.toString()) || ec.getRole().equals(Role.HCAT.toString())) && ec.getConfigurations().containsKey(Configuration.HIVE_CONFIG_TAG)) { ec.getConfigurations().get(Configuration.HIVE_CONFIG_TAG).remove(Configuration.HIVE_METASTORE_PASSWORD_PROPERTY); } // Add attributes Map<String, Map<String, Map<String, String>>> configAttributes = configHelper.getEffectiveConfigAttributes(cluster, ec.getConfigurationTags()); for (Map.Entry<String, Map<String, Map<String, String>>> attributesOccurrence : configAttributes.entrySet()) { String type = attributesOccurrence.getKey(); Map<String, Map<String, String>> attributes = attributesOccurrence.getValue(); if (ec.getConfigurationAttributes() != null) { if (!ec.getConfigurationAttributes().containsKey(type)) { ec.getConfigurationAttributes().put(type, new TreeMap<String, Map<String, String>>()); } configHelper.cloneAttributesMap(attributes, ec.getConfigurationAttributes().get(type)); } } return ec; } @Override public Set<StackConfigurationDependencyResponse> getStackConfigurationDependencies( Set<StackConfigurationDependencyRequest> requests) throws AmbariException { Set<StackConfigurationDependencyResponse> response = new 
HashSet<StackConfigurationDependencyResponse>(); if (requests != null) { for (StackConfigurationDependencyRequest request : requests) { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); String propertyName = request.getPropertyName(); Set<StackConfigurationDependencyResponse> stackConfigurations = getStackConfigurationDependencies(request); for (StackConfigurationDependencyResponse dependencyResponse : stackConfigurations) { dependencyResponse.setStackName(stackName); dependencyResponse.setStackVersion(stackVersion); dependencyResponse.setServiceName(serviceName); dependencyResponse.setPropertyName(propertyName); } response.addAll(stackConfigurations); } } return response; } private Set<StackConfigurationDependencyResponse> getStackConfigurationDependencies(StackConfigurationDependencyRequest request) throws AmbariException { Set<StackConfigurationDependencyResponse> response = new HashSet<StackConfigurationDependencyResponse>(); String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); String propertyName = request.getPropertyName(); String dependencyName = request.getDependencyName(); Set<PropertyInfo> properties = ambariMetaInfo.getPropertiesByName(stackName, stackVersion, serviceName, propertyName); for (PropertyInfo property: properties) { for (PropertyDependencyInfo dependency: property.getDependedByProperties()) { if (dependencyName == null || dependency.getName().equals(dependencyName)) { response.add(dependency.convertToResponse()); } } } return response; } @Transactional void updateServiceStates( Cluster cluster, Map<State, List<Service>> changedServices, Map<State, List<ServiceComponent>> changedComps, Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts, Collection<ServiceComponentHost> ignoredScHosts ) { if (changedServices != null) { for (Entry<State, List<Service>> 
entry : changedServices.entrySet()) { State newState = entry.getKey(); for (Service s : entry.getValue()) { if (s.isClientOnlyService() && newState == State.STARTED) { continue; } s.setDesiredState(newState); } } } if (changedComps != null) { for (Entry<State, List<ServiceComponent>> entry : changedComps.entrySet()) { State newState = entry.getKey(); for (ServiceComponent sc : entry.getValue()) { sc.setDesiredState(newState); } } } for (Map<State, List<ServiceComponentHost>> stateScHostMap : changedScHosts.values()) { for (Entry<State, List<ServiceComponentHost>> entry : stateScHostMap.entrySet()) { State newState = entry.getKey(); for (ServiceComponentHost sch : entry.getValue()) { sch.setDesiredState(newState); } } } if (ignoredScHosts != null) { for (ServiceComponentHost scHost : ignoredScHosts) { scHost.setDesiredState(scHost.getState()); } } } @Override public RequestStatusResponse createAndPersistStages(Cluster cluster, Map<String, String> requestProperties, Map<String, String> requestParameters, Map<State, List<Service>> changedServices, Map<State, List<ServiceComponent>> changedComponents, Map<String, Map<State, List<ServiceComponentHost>>> changedHosts, Collection<ServiceComponentHost> ignoredHosts, boolean runSmokeTest, boolean reconfigureClients) throws AmbariException { RequestStageContainer request = addStages(null, cluster, requestProperties, requestParameters, changedServices, changedComponents, changedHosts, ignoredHosts, runSmokeTest, reconfigureClients); request.persist(); return request.getRequestStatusResponse(); } @Override public RequestStageContainer addStages(RequestStageContainer requestStages, Cluster cluster, Map<String, String> requestProperties, Map<String, String> requestParameters, Map<State, List<Service>> changedServices, Map<State, List<ServiceComponent>> changedComponents, Map<String, Map<State, List<ServiceComponentHost>>> changedHosts, Collection<ServiceComponentHost> ignoredHosts, boolean runSmokeTest, boolean 
                             reconfigureClients) throws AmbariException {

    if (requestStages == null) {
      requestStages = new RequestStageContainer(actionManager.getNextRequestId(), null, requestFactory, actionManager);
    }

    requestStages = doStageCreation(requestStages, cluster, changedServices, changedComponents,
        changedHosts, requestParameters, requestProperties, runSmokeTest, reconfigureClients);

    // Desired-state updates are done under the cluster-wide write lock so
    // readers see a consistent view of the new desired states.
    Lock clusterWriteLock = cluster.getClusterGlobalLock().writeLock();
    clusterWriteLock.lock();
    try {
      updateServiceStates(cluster, changedServices, changedComponents, changedHosts, ignoredHosts);
    } finally {
      clusterWriteLock.unlock();
    }
    return requestStages;
  }

  //todo: for now made this public since it is still used by createHostComponents
  //todo: delete after all host component logic is in HostComponentResourceProvider
  /**
   * Validates that a host component request carries cluster name, component
   * name and host name, and does not attempt to set adminState.
   *
   * @throws IllegalArgumentException if a required field is missing or
   *           adminState is present
   */
  public void validateServiceComponentHostRequest(ServiceComponentHostRequest request) {
    if (request.getClusterName() == null
        || request.getClusterName().isEmpty()
        || request.getComponentName() == null
        || request.getComponentName().isEmpty()
        || request.getHostname() == null
        || request.getHostname().isEmpty()) {
      throw new IllegalArgumentException("Invalid arguments"
          + ", cluster name, component name and host name should be"
          + " provided");
    }

    if (request.getAdminState() != null) {
      throw new IllegalArgumentException("Property adminState cannot be modified through update. Use service " +
          "specific DECOMMISSION action to decommision/recommission components.");
    }
  }

  /**
   * Resolves the service that owns the given component in the cluster's
   * desired stack.
   *
   * @throws AmbariException if no service declares the component
   */
  @Override
  public String findServiceName(Cluster cluster, String componentName) throws AmbariException {
    StackId stackId = cluster.getDesiredStackVersion();
    String serviceName =
        ambariMetaInfo.getComponentToService(stackId.getStackName(),
            stackId.getStackVersion(), componentName);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Looking up service name for component"
          + ", componentName=" + componentName
          + ", serviceName=" + serviceName);
    }

    if (serviceName == null
        || serviceName.isEmpty()) {
      throw new AmbariException("Could not find service for component"
          + ", componentName=" + componentName
          + ", clusterName=" + cluster.getClusterName()
          + ", stackInfo=" + stackId.getStackId());
    }
    return serviceName;
  }

  /**
   * Updates the users specified.
   *
   * @param requests the users to modify
   *
   * @throws AmbariException if the resources cannot be updated
   * @throws IllegalArgumentException if the authenticated user is not authorized to update all of
   *           the requested properties
   */
  @Override
  public synchronized void updateUsers(Set<UserRequest> requests) throws AmbariException, AuthorizationException {
    boolean isUserAdministrator = AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null,
        RoleAuthorization.AMBARI_MANAGE_USERS);
    String authenticatedUsername = AuthorizationHelper.getAuthenticatedName();

    for (UserRequest request : requests) {
      String requestedUsername = request.getUsername();

      // An administrator can modify any user, else a user can only modify themself.
      if (!isUserAdministrator && (!authenticatedUsername.equalsIgnoreCase(requestedUsername))) {
        throw new AuthorizationException();
      }

      User u = users.getAnyUser(requestedUsername);
      if (null == u) {
        // Unknown users are silently skipped rather than treated as errors.
        continue;
      }

      if (null != request.isActive()) {
        // If this value is being set, make sure the authenticated user is an administrator before
        // allowing to change it. Only administrators should be able to change a user's active state
        if (!isUserAdministrator) {
          throw new AuthorizationException("The authenticated user is not authorized to update the requested resource property");
        }
        users.setUserActive(u.getUserName(), request.isActive());
      }

      if (null != request.isAdmin()) {
        // If this value is being set, make sure the authenticated user is an administrator before
        // allowing to change it. Only administrators should be able to change a user's administrative
        // privileges
        if (!isUserAdministrator) {
          throw new AuthorizationException("The authenticated user is not authorized to update the requested resource property");
        }

        if (request.isAdmin()) {
          users.grantAdminPrivilege(u.getUserId());
        } else {
          users.revokeAdminPrivilege(u.getUserId());
        }
      }

      // Password changes require both the old and the new password.
      if (null != request.getOldPassword() && null != request.getPassword()) {
        users.modifyPassword(u.getUserName(), request.getOldPassword(),
            request.getPassword());
      }
    }
  }

  @Override
  public synchronized void deleteCluster(ClusterRequest request)
      throws AmbariException {

    if (request.getClusterName() == null
        || request.getClusterName().isEmpty()) {
      // FIXME throw correct error
      throw new AmbariException("Invalid arguments");
    }
    LOG.info("Received a delete cluster request"
        + ", clusterName=" + request.getClusterName());
    if (request.getHostNames() != null) {
      // FIXME treat this as removing a host from a cluster?
} else { // deleting whole cluster clusters.deleteCluster(request.getClusterName()); } } @Override public RequestStatusResponse deleteHostComponents( Set<ServiceComponentHostRequest> requests) throws AmbariException, AuthorizationException { Set<ServiceComponentHostRequest> expanded = new HashSet<ServiceComponentHostRequest>(); // if any request are for the whole host, they need to be expanded for (ServiceComponentHostRequest request : requests) { if (null == request.getComponentName()) { if (null == request.getClusterName() || request.getClusterName().isEmpty() || null == request.getHostname() || request.getHostname().isEmpty()) { throw new IllegalArgumentException("Cluster name and hostname must be specified."); } Cluster cluster = clusters.getCluster(request.getClusterName()); if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.SERVICE_ADD_DELETE_SERVICES,RoleAuthorization.HOST_ADD_DELETE_COMPONENTS))) { throw new AuthorizationException("The authenticated user is not authorized to delete service components from hosts"); } for (ServiceComponentHost sch : cluster.getServiceComponentHosts(request.getHostname())) { ServiceComponentHostRequest schr = new ServiceComponentHostRequest(request.getClusterName(), sch.getServiceName(), sch.getServiceComponentName(), sch.getHostName(), null); expanded.add(schr); } } else { expanded.add(request); } } Map<ServiceComponent, Set<ServiceComponentHost>> safeToRemoveSCHs = new HashMap<ServiceComponent, Set<ServiceComponentHost>>(); for (ServiceComponentHostRequest request : expanded) { validateServiceComponentHostRequest(request); Cluster cluster = clusters.getCluster(request.getClusterName()); if (StringUtils.isEmpty(request.getServiceName())) { request.setServiceName(findServiceName(cluster, request.getComponentName())); } if (LOG.isDebugEnabled()) { LOG.debug("Received a hostComponent DELETE request" + ", clusterName=" + request.getClusterName() + ", serviceName=" + 
            request.getServiceName()
            + ", componentName=" + request.getComponentName()
            + ", hostname=" + request.getHostname()
            + ", request=" + request);
      }

      Service service = cluster.getService(request.getServiceName());
      ServiceComponent component = service.getServiceComponent(request.getComponentName());
      ServiceComponentHost componentHost = component.getServiceComponentHost(request.getHostname());

      if (!componentHost.canBeRemoved()) {
        throw new AmbariException("Host Component cannot be removed"
            + ", clusterName=" + request.getClusterName()
            + ", serviceName=" + request.getServiceName()
            + ", componentName=" + request.getComponentName()
            + ", hostname=" + request.getHostname()
            + ", request=" + request);
      }

      // Only allow removing master/slave components in DISABLED/UNKNOWN/INSTALL_FAILED/INIT state without stages
      // generation.
      // Clients may be removed without a state check.
      if (!component.isClientComponent() &&
          !componentHost.getState().isRemovableState()) {
        throw new AmbariException("To remove master or slave components they must be in " +
            "DISABLED/INIT/INSTALLED/INSTALL_FAILED/UNKNOWN state. Current=" + componentHost.getState() + ".");
      }

      setRestartRequiredServices(service, request.getComponentName());

      if (!safeToRemoveSCHs.containsKey(component)) {
        safeToRemoveSCHs.put(component, new HashSet<ServiceComponentHost>());
      }
      safeToRemoveSCHs.get(component).add(componentHost);
    }

    // Second pass: actually delete the host components, and for decommissioned
    // slaves clean up the master-side exclude lists.
    for (Entry<ServiceComponent, Set<ServiceComponentHost>> entry : safeToRemoveSCHs.entrySet()) {
      for (ServiceComponentHost componentHost : entry.getValue()) {
        String included_hostname = componentHost.getHostName();
        String serviceName = entry.getKey().getServiceName();
        String master_component_name = null;
        String slave_component_name = componentHost.getServiceComponentName();
        HostComponentAdminState desiredAdminState = componentHost.getComponentAdminState();
        State slaveState = componentHost.getState();
        //Delete hostcomponents
        entry.getKey().deleteServiceComponentHosts(componentHost.getHostName());
        // If deleted hostcomponents support decommission and were decommissioned and stopped
        if (AmbariCustomCommandExecutionHelper.masterToSlaveMappingForDecom.containsValue(slave_component_name)
            && desiredAdminState.equals(HostComponentAdminState.DECOMMISSIONED)
            && slaveState.equals(State.INSTALLED)) {

          // Find the master component that owns this slave type.
          for (Entry<String, String> entrySet : AmbariCustomCommandExecutionHelper.masterToSlaveMappingForDecom.entrySet()) {
            if (entrySet.getValue().equals(slave_component_name)) {
              master_component_name = entrySet.getKey();
            }
          }
          //Clear exclude file or draining list except HBASE
          if (!serviceName.equals(Service.Type.HBASE.toString())) {
            HashMap<String, String> requestProperties = new HashMap<String, String>();
            requestProperties.put("context", "Remove host " + included_hostname + " from exclude file");
            requestProperties.put("exclusive", "true");
            HashMap<String, String> params = new HashMap<String, String>();
            params.put("included_hosts", included_hostname);
            params.put("slave_type", slave_component_name);
            params.put(AmbariCustomCommandExecutionHelper.UPDATE_EXCLUDE_FILE_ONLY, "true");

            //Create filter for RECOMISSION command
            RequestResourceFilter resourceFilter = new RequestResourceFilter(serviceName, master_component_name, null);
            //Create request for RECOMISSION command
            ExecuteActionRequest actionRequest = new ExecuteActionRequest(
                entry.getKey().getClusterName(), AmbariCustomCommandExecutionHelper.DECOMMISSION_COMMAND_NAME,
                null, Collections.singletonList(resourceFilter), null, params, true);
            //Send request
            createAction(actionRequest, requestProperties);
          }

          //Mark master component as needed to restart for remove host info from components UI
          Cluster cluster = clusters.getCluster(entry.getKey().getClusterName());
          Service service = cluster.getService(serviceName);
          // NOTE(review): master_component_name can still be null here if the
          // decom mapping lookup found no match — confirm getServiceComponent
          // tolerates a null name.
          ServiceComponent sc = service.getServiceComponent(master_component_name);

          if (sc != null && sc.isMasterComponent()) {
            for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
              sch.setRestartRequired(true);
            }
          }
        }
      }
    }

    // set restartRequired flag for monitoring services
    if (!safeToRemoveSCHs.isEmpty()) {
      setMonitoringServicesRestartRequired(requests);
    }

    return null;
  }

  @Override
  public void deleteUsers(Set<UserRequest> requests)
      throws AmbariException {

    for (UserRequest r : requests) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Received a delete user request"
            + ", username=" + r.getUsername());
      }
      User u = users.getAnyUser(r.getUsername());
      if (null != u) {
        users.removeUser(u);
      }
    }
  }

  @Override
  public void deleteGroups(Set<GroupRequest> requests) throws AmbariException {
    for (GroupRequest request: requests) {
      LOG.debug("Received a delete group request, groupname=" + request.getGroupName());
      final Group group = users.getGroup(request.getGroupName());
      if (group != null) {
        users.removeGroup(group);
      }
    }
  }

  @Override
  public void deleteMembers(java.util.Set<MemberRequest> requests) throws AmbariException {
    for (MemberRequest request : requests) {
      LOG.debug("Received a delete member request, " + request);
      users.removeMemberFromGroup(request.getGroupName(), request.getUserName());
    }
  }

  /**
   * Get a request response for the
given request ids. Note that this method * fully populates a request resource including the set of task sub-resources * in the request response. */ RequestStatusResponse getRequestStatusResponse(long requestId) { RequestStatusResponse response = new RequestStatusResponse(requestId); List<HostRoleCommand> hostRoleCommands = actionManager.getRequestTasks(requestId); response.setRequestContext(actionManager.getRequestContext(requestId)); List<ShortTaskStatus> tasks = new ArrayList<ShortTaskStatus>(); for (HostRoleCommand hostRoleCommand : hostRoleCommands) { tasks.add(new ShortTaskStatus(hostRoleCommand)); } response.setTasks(tasks); return response; } @Override public Set<ClusterResponse> getClusters(Set<ClusterRequest> requests) throws AmbariException, AuthorizationException { Set<ClusterResponse> response = new HashSet<ClusterResponse>(); for (ClusterRequest request : requests) { try { response.addAll(getClusters(request)); } catch (ClusterNotFoundException e) { if (requests.size() == 1) { // only throw exception if 1 request. // there will be > 1 request in case of OR predicate throw e; } } } return response; } @Override public Set<ServiceComponentHostResponse> getHostComponents( Set<ServiceComponentHostRequest> requests) throws AmbariException { LOG.debug("Processing requests: {}", requests); Set<ServiceComponentHostResponse> response = new HashSet<ServiceComponentHostResponse>(); for (ServiceComponentHostRequest request : requests) { try { response.addAll(getHostComponents(request)); } catch (ServiceComponentHostNotFoundException e) { if (requests.size() == 1) { // only throw exception if 1 request. // there will be > 1 request in case of OR predicate throw e; } else { LOG.debug("Ignoring not found exception due to other requests", e); } } catch (ServiceNotFoundException e) { if (requests.size() == 1) { // only throw exception if 1 request. 
// there will be > 1 request in case of OR predicate // In 'OR' case, a host_component may be included in predicate // that has no corresponding service throw e; } else { LOG.debug("Ignoring not found exception due to other requests", e); } } catch (ServiceComponentNotFoundException e) { if (requests.size() == 1) { // only throw exception if 1 request. // there will be > 1 request in case of OR predicate // In 'OR' case, a host_component may be included in predicate // that has no corresponding component throw e; } else { LOG.debug("Ignoring not found exception due to other requests", e); } } catch (ParentObjectNotFoundException e) { // If there is only one request, always throw exception. // There will be > 1 request in case of OR predicate. // For HostNotFoundException, only throw exception if host_name is // provided in URL. If host_name is part of query, don't throw exception. boolean throwException = true; if (requests.size() > 1 && HostNotFoundException.class.isInstance(e.getCause())) { for (ServiceComponentHostRequest r : requests) { if (r.getHostname() == null) { // host_name provided in query since all requests don't have host_name set throwException = false; LOG.debug("HostNotFoundException ignored", e); break; } } } if (throwException) { throw e; } } } return response; } @Override public Set<ConfigurationResponse> getConfigurations( Set<ConfigurationRequest> requests) throws AmbariException { Set<ConfigurationResponse> response = new HashSet<ConfigurationResponse>(); for (ConfigurationRequest request : requests) { response.addAll(getConfigurations(request)); } return response; } @Override public Set<ServiceConfigVersionResponse> getServiceConfigVersions(Set<ServiceConfigVersionRequest> requests) throws AmbariException { Set<ServiceConfigVersionResponse> responses = new LinkedHashSet<ServiceConfigVersionResponse>(); for (ServiceConfigVersionRequest request : requests) { responses.addAll(getServiceConfigVersions(request)); } return responses; } private 
Set<ServiceConfigVersionResponse> getServiceConfigVersions(ServiceConfigVersionRequest request) throws AmbariException { if (request.getClusterName() == null) { throw new IllegalArgumentException("Invalid arguments, cluster name" + " should not be null"); } Cluster cluster = clusters.getCluster(request.getClusterName()); Set<ServiceConfigVersionResponse> result = new LinkedHashSet<ServiceConfigVersionResponse>(); for (ServiceConfigVersionResponse response : cluster.getServiceConfigVersions()) { if (request.getServiceName() != null && !StringUtils.equals(request.getServiceName(), response.getServiceName())) { continue; } if (request.getVersion() != null && NumberUtils.compare(request.getVersion(), response.getVersion()) != 0) { continue; } if (request.getUserName() != null && !StringUtils.equals(request.getUserName(), response.getUserName())) { continue; } result.add(response); } return result; } @Override public Set<UserResponse> getUsers(Set<UserRequest> requests) throws AmbariException, AuthorizationException { Set<UserResponse> responses = new HashSet<UserResponse>(); for (UserRequest r : requests) { if (LOG.isDebugEnabled()) { LOG.debug("Received a getUsers request" + ", userRequest=" + r.toString()); } String requestedUsername = r.getUsername(); String authenticatedUsername = AuthorizationHelper.getAuthenticatedName(); // A user resource may be retrieved by an administrator or the same user. 
if(!AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null, RoleAuthorization.AMBARI_MANAGE_USERS)) { if (null == requestedUsername) { // Since the authenticated user is not the administrator, force only that user's resource // to be returned requestedUsername = authenticatedUsername; } else if (!requestedUsername.equalsIgnoreCase(authenticatedUsername)) { // Since the authenticated user is not the administrator and is asking for a different user, // throw an AuthorizationException throw new AuthorizationException(); } } // get them all if (null == requestedUsername) { for (User u : users.getAllUsers()) { UserResponse resp = new UserResponse(u.getUserName(), u.getUserType(), u.isLdapUser(), u.isActive(), u .isAdmin()); resp.setGroups(new HashSet<String>(u.getGroups())); responses.add(resp); } } else { User u = users.getAnyUser(requestedUsername); if (null == u) { if (requests.size() == 1) { // only throw exceptin if there is a single request // if there are multiple requests, this indicates an OR predicate throw new ObjectNotFoundException("Cannot find user '" + requestedUsername + "'"); } } else { UserResponse resp = new UserResponse(u.getUserName(), u.getUserType(), u.isLdapUser(), u.isActive(), u .isAdmin()); resp.setGroups(new HashSet<String>(u.getGroups())); responses.add(resp); } } } return responses; } @Override public Set<GroupResponse> getGroups(Set<GroupRequest> requests) throws AmbariException { final Set<GroupResponse> responses = new HashSet<GroupResponse>(); for (GroupRequest request: requests) { LOG.debug("Received a getGroups request, groupRequest=" + request.toString()); // get them all if (null == request.getGroupName()) { for (Group group: users.getAllGroups()) { final GroupResponse response = new GroupResponse(group.getGroupName(), group.isLdapGroup()); responses.add(response); } } else { final Group group = users.getGroup(request.getGroupName()); if (null == group) { if (requests.size() == 1) { // only throw exception if there is a single 
request // if there are multiple requests, this indicates an OR predicate throw new ObjectNotFoundException("Cannot find group '" + request.getGroupName() + "'"); } } else { final GroupResponse response = new GroupResponse(group.getGroupName(), group.isLdapGroup()); responses.add(response); } } } return responses; } @Override public void updateGroups(Set<GroupRequest> requests) throws AmbariException { // currently no group updates are supported } protected String getClientHostForRunningAction(Cluster cluster, Service service, ServiceComponent serviceComponent) throws AmbariException { if (serviceComponent != null && !serviceComponent.getServiceComponentHosts().isEmpty()) { Set<String> candidateHosts = serviceComponent.getServiceComponentHosts().keySet(); filterHostsForAction(candidateHosts, service, cluster, Resource.Type.Cluster); return getHealthyHost(candidateHosts); } return null; } protected ServiceComponent getClientComponentForRunningAction(Cluster cluster, Service service) throws AmbariException { /* * We assume Cluster level here. That means that we never run service * checks on clients/hosts that are in maintenance state. 
* That also means that we can not run service check if the only host * that has client component is in maintenance state */ StackId stackId = service.getDesiredStackVersion(); ComponentInfo compInfo = ambariMetaInfo.getService(stackId.getStackName(), stackId.getStackVersion(), service.getName()).getClientComponent(); if (compInfo != null) { try { ServiceComponent serviceComponent = service.getServiceComponent(compInfo.getName()); if (!serviceComponent.getServiceComponentHosts().isEmpty()) { return serviceComponent; } } catch (ServiceComponentNotFoundException e) { LOG.warn("Could not find required component to run action" + ", clusterName=" + cluster.getClusterName() + ", serviceName=" + service.getName() + ", componentName=" + compInfo.getName()); } } // any component will do Map<String, ServiceComponent> components = service.getServiceComponents(); if (!components.isEmpty()) { for (ServiceComponent serviceComponent : components.values()) { if (!serviceComponent.getServiceComponentHosts().isEmpty()) { return serviceComponent; } } } return null; } /** * Utility method that filters out hosts from set based on their maintenance * state status. */ protected void filterHostsForAction(Set<String> candidateHosts, Service service, final Cluster cluster, final Resource.Type level) throws AmbariException { Set<String> ignoredHosts = maintenanceStateHelper.filterHostsInMaintenanceState( candidateHosts, new MaintenanceStateHelper.HostPredicate() { @Override public boolean shouldHostBeRemoved(final String hostname) throws AmbariException { Host host = clusters.getHost(hostname); return !maintenanceStateHelper.isOperationAllowed( host, cluster.getClusterId(), level); } } ); LOG.debug("Ignoring hosts when selecting available hosts for action" + " due to maintenance state." 
+ "Ignored hosts =" + ignoredHosts + ", cluster=" + cluster.getClusterName() + ", service=" + service.getName()); } @Override public String getHealthyHost(Set<String> hostList) throws AmbariException { String hostName = null; for (String candidateHostName : hostList) { hostName = candidateHostName; Host candidateHost = clusters.getHost(hostName); if (candidateHost.getState() == HostState.HEALTHY) { break; } } return hostName; } @Override public RequestStatusResponse createAction(ExecuteActionRequest actionRequest, Map<String, String> requestProperties) throws AmbariException { String clusterName = actionRequest.getClusterName(); String requestContext = ""; if (requestProperties != null) { requestContext = requestProperties.get(REQUEST_CONTEXT_PROPERTY); if (requestContext == null) { // guice needs a non-null value as there is no way to mark this parameter @Nullable requestContext = ""; } } Cluster cluster = null; if (null != clusterName) { cluster = clusters.getCluster(clusterName); LOG.info("Received action execution request" + ", clusterName=" + actionRequest.getClusterName() + ", request=" + actionRequest.toString()); } ActionExecutionContext actionExecContext = getActionExecutionContext(actionRequest); if (actionRequest.isCommand()) { customCommandExecutionHelper.validateAction(actionRequest); } else { actionExecutionHelper.validateAction(actionRequest); } // TODO Alejandro, Called First. insert params.version. Called during Rebalance HDFS, ZOOKEEPER Restart, Zookeeper Service Check. 
long requestId = actionManager.getNextRequestId(); RequestStageContainer requestStageContainer = new RequestStageContainer( requestId, null, requestFactory, actionManager, actionRequest); StackId stackId = null; if (null != cluster) { stackId = cluster.getDesiredStackVersion(); } ExecuteCommandJson jsons = customCommandExecutionHelper.getCommandJson(actionExecContext, cluster, stackId); String commandParamsForStage = jsons.getCommandParamsForStage(); Map<String, String> commandParamsStage = gson.fromJson(commandParamsForStage, new TypeToken<Map<String, String>>() {}.getType()); // Ensure that the specified requestContext (if any) is set as the request context if (!requestContext.isEmpty()) { requestStageContainer.setRequestContext(requestContext); } // replace password references in requestProperties SecretReference.replaceReferencesWithPasswords(commandParamsStage, cluster); // If the request is to perform the Kerberos service check, set up the stages to // ensure that the (cluster-level) smoke user principal and keytab is available on all hosts boolean kerberosServiceCheck = Role.KERBEROS_SERVICE_CHECK.name().equals(actionRequest.getCommandName()); if (kerberosServiceCheck) { // Parse the command parameters into a map so that additional values may be added to it try { requestStageContainer = kerberosHelper.createTestIdentity(cluster, commandParamsStage, requestStageContainer); } catch (KerberosOperationException e) { throw new IllegalArgumentException(e.getMessage(), e); } } commandParamsForStage = gson.toJson(commandParamsStage); Stage stage = createNewStage(requestStageContainer.getLastStageId(), cluster, requestId, requestContext, jsons.getClusterHostInfo(), commandParamsForStage, jsons.getHostParamsForStage()); if (actionRequest.isCommand()) { customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, requestProperties); } else { actionExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage); } RoleGraph rg; if (null != 
cluster) { RoleCommandOrder rco = getRoleCommandOrder(cluster); rg = roleGraphFactory.createNew(rco); } else { rg = roleGraphFactory.createNew(); } rg.build(stage); List<Stage> stages = rg.getStages(); if (stages != null && !stages.isEmpty()) { requestStageContainer.addStages(stages); } // If the request is to perform the Kerberos service check, delete the test-specific principal // and keytab that was created for this service check if (kerberosServiceCheck) { // Parse the command parameters into a map so that existing values may be accessed and // additional values may be added to it. commandParamsStage = gson.fromJson(commandParamsForStage, new TypeToken<Map<String, String>>() { }.getType()); try { requestStageContainer = kerberosHelper.deleteTestIdentity(cluster, commandParamsStage, requestStageContainer); } catch (KerberosOperationException e) { throw new IllegalArgumentException(e.getMessage(), e); } } requestStageContainer.persist(); return requestStageContainer.getRequestStatusResponse(); } @Override public Set<StackResponse> getStacks(Set<StackRequest> requests) throws AmbariException { Set<StackResponse> response = new HashSet<StackResponse>(); for (StackRequest request : requests) { try { response.addAll(getStacks(request)); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request. 
// there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<StackResponse> getStacks(StackRequest request) throws AmbariException { Set<StackResponse> response; String stackName = request.getStackName(); if (stackName != null) { // this will throw an exception if the stack doesn't exist ambariMetaInfo.getStacks(stackName); response = Collections.singleton(new StackResponse(stackName)); } else { Collection<StackInfo> supportedStacks = ambariMetaInfo.getStacks(); response = new HashSet<StackResponse>(); for (StackInfo stack: supportedStacks) { response.add(new StackResponse(stack.getName())); } } return response; } @Override public synchronized RequestStatusResponse updateStacks() throws AmbariException { try { ambariMetaInfo.init(); } catch (AmbariException e) { throw e; } catch (Exception e) { throw new AmbariException( "Ambari Meta Information can't be read from the stack root directory"); } return null; } @Override public Set<RepositoryResponse> getRepositories(Set<RepositoryRequest> requests) throws AmbariException { Set<RepositoryResponse> response = new HashSet<RepositoryResponse>(); for (RepositoryRequest request : requests) { try { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); Set<RepositoryResponse> repositories = getRepositories(request); for (RepositoryResponse repositoryResponse : repositories) { if (repositoryResponse.getStackName() == null) { repositoryResponse.setStackName(stackName); } if (repositoryResponse.getStackVersion() == null) { repositoryResponse.setStackVersion(stackVersion); } repositoryResponse.setClusterVersionId(request.getClusterVersionId()); } response.addAll(repositories); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request. 
// there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<RepositoryResponse> getRepositories(RepositoryRequest request) throws AmbariException { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String osType = request.getOsType(); String repoId = request.getRepoId(); Long repositoryVersionId = request.getRepositoryVersionId(); String versionDefinitionId = request.getVersionDefinitionId(); // !!! when asking for Repository responses for a versionDefinition, it is either for // an established repo version (a Long) OR from the in-memory generated ones (a String) if (null == repositoryVersionId && null != versionDefinitionId) { if (NumberUtils.isDigits(versionDefinitionId)) { repositoryVersionId = Long.valueOf(versionDefinitionId); } } Set<RepositoryResponse> responses = new HashSet<RepositoryResponse>(); if (repositoryVersionId != null) { final RepositoryVersionEntity repositoryVersion = repositoryVersionDAO.findByPK(repositoryVersionId); if (repositoryVersion != null) { for (OperatingSystemEntity operatingSystem: repositoryVersion.getOperatingSystems()) { if (operatingSystem.getOsType().equals(osType)) { for (RepositoryEntity repository: operatingSystem.getRepositories()) { final RepositoryResponse response = new RepositoryResponse(repository.getBaseUrl(), osType, repository.getRepositoryId(), repository.getName(), "", "", ""); if (null != versionDefinitionId) { response.setVersionDefinitionId(versionDefinitionId); } else { response.setRepositoryVersionId(repositoryVersionId); } response.setStackName(repositoryVersion.getStackName()); response.setStackVersion(repositoryVersion.getStackVersion()); responses.add(response); } break; } } } } else if (null != versionDefinitionId) { VersionDefinitionXml xml = ambariMetaInfo.getVersionDefinition(versionDefinitionId); if (null == xml) { throw new AmbariException(String.format("Version identified by %s does not exist", versionDefinitionId)); 
} StackId stackId = new StackId(xml.release.stackId); for (RepositoryXml.Os os : xml.repositoryInfo.getOses()) { for (RepositoryXml.Repo repo : os.getRepos()) { RepositoryResponse resp = new RepositoryResponse(repo.getBaseUrl(), os.getFamily(), repo.getRepoId(), repo.getRepoName(), repo.getMirrorsList(), repo.getBaseUrl(), repo.getLatestUri()); resp.setVersionDefinitionId(versionDefinitionId); resp.setStackName(stackId.getStackName()); resp.setStackVersion(stackId.getStackVersion()); responses.add(resp); } } } else { if (repoId == null) { List<RepositoryInfo> repositories = ambariMetaInfo.getRepositories(stackName, stackVersion, osType); for (RepositoryInfo repository: repositories) { responses.add(repository.convertToResponse()); } } else { RepositoryInfo repository = ambariMetaInfo.getRepository(stackName, stackVersion, osType, repoId); responses = Collections.singleton(repository.convertToResponse()); } } return responses; } @Override public void updateRepositories(Set<RepositoryRequest> requests) throws AmbariException { for (RepositoryRequest rr : requests) { if (null == rr.getStackName() || rr.getStackName().isEmpty()) { throw new AmbariException("Stack name must be specified."); } if (null == rr.getStackVersion() || rr.getStackVersion().isEmpty()) { throw new AmbariException("Stack version must be specified."); } if (null == rr.getOsType() || rr.getOsType().isEmpty()) { throw new AmbariException("OS type must be specified."); } if (null == rr.getRepoId() || rr.getRepoId().isEmpty()) { throw new AmbariException("Repo ID must be specified."); } if (null != rr.getBaseUrl()) { if (rr.isVerifyBaseUrl()) { verifyRepository(rr); } if (rr.getRepositoryVersionId() != null) { throw new AmbariException("Can't directly update repositories in repository_version, update the repository_version instead"); } ambariMetaInfo.updateRepoBaseURL(rr.getStackName(), rr.getStackVersion(), rr.getOsType(), rr.getRepoId(), rr.getBaseUrl()); } } } @Override public void 
verifyRepositories(Set<RepositoryRequest> requests) throws AmbariException { for (RepositoryRequest request: requests) { if (request.getBaseUrl() == null) { throw new AmbariException("Base url is missing for request " + request); } verifyRepository(request); } } /** * Verifies single repository, see {{@link #verifyRepositories(Set)}. * * @param request request * @throws AmbariException if verification fails */ private void verifyRepository(RepositoryRequest request) throws AmbariException { URLStreamProvider usp = new URLStreamProvider(REPO_URL_CONNECT_TIMEOUT, REPO_URL_READ_TIMEOUT, null, null, null); usp.setSetupTruststoreForHttps(false); RepositoryInfo repositoryInfo = ambariMetaInfo.getRepository(request.getStackName(), request.getStackVersion(), request.getOsType(), request.getRepoId()); String repoName = repositoryInfo.getRepoName(); String errorMessage = null; Exception e = null; String[] suffixes = configs.getRepoValidationSuffixes(request.getOsType()); for (String suffix : suffixes) { String formatted_suffix = String.format(suffix, repoName); String spec = request.getBaseUrl().trim(); // This logic is to identify if the end of baseurl has a slash ('/') and/or the beginning of suffix String (e.g. "/repodata/repomd.xml") // has a slash and they can form a good url. // e.g. "http://baseurl.com/" + "/repodata/repomd.xml" becomes "http://baseurl.com/repodata/repomd.xml" but not "http://baseurl.com//repodata/repomd.xml" if (spec.charAt(spec.length() - 1) != '/' && formatted_suffix.charAt(0) != '/') { spec = spec + "/" + formatted_suffix; } else if (spec.charAt(spec.length() - 1) == '/' && formatted_suffix.charAt(0) == '/') { spec = spec + formatted_suffix.substring(1); } else { spec = spec + formatted_suffix; } // if spec contains "file://" then check local file system. 
final String FILE_SCHEME = "file://"; if(spec.toLowerCase().startsWith(FILE_SCHEME)){ String filePath = spec.substring(FILE_SCHEME.length()); File f = new File(filePath); if(!f.exists()){ errorMessage = "Could not access base url . " + spec + " . "; e = new FileNotFoundException(errorMessage); break; } }else{ try { IOUtils.readLines(usp.readFrom(spec)); } catch (IOException ioe) { e = ioe; errorMessage = "Could not access base url . " + request.getBaseUrl() + " . "; if (LOG.isDebugEnabled()) { errorMessage += ioe; } else { errorMessage += ioe.getMessage(); } break; } } } if (e != null) { LOG.error(errorMessage); throw new IllegalArgumentException(errorMessage, e); } } @Override public Set<StackVersionResponse> getStackVersions( Set<StackVersionRequest> requests) throws AmbariException { Set<StackVersionResponse> response = new HashSet<StackVersionResponse>(); for (StackVersionRequest request : requests) { String stackName = request.getStackName(); try { Set<StackVersionResponse> stackVersions = getStackVersions(request); for (StackVersionResponse stackVersionResponse : stackVersions) { stackVersionResponse.setStackName(stackName); } response.addAll(stackVersions); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request. 
// there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<StackVersionResponse> getStackVersions(StackVersionRequest request) throws AmbariException { Set<StackVersionResponse> response; String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); if (stackVersion != null) { StackInfo stackInfo = ambariMetaInfo.getStack(stackName, stackVersion); response = Collections.singleton(stackInfo.convertToResponse()); } else { try { Collection<StackInfo> stackInfos = ambariMetaInfo.getStacks(stackName); response = new HashSet<StackVersionResponse>(); for (StackInfo stackInfo: stackInfos) { response.add(stackInfo.convertToResponse()); } } catch (StackAccessException e) { response = Collections.emptySet(); } } return response; } @Override public Set<StackServiceResponse> getStackServices( Set<StackServiceRequest> requests) throws AmbariException { Set<StackServiceResponse> response = new HashSet<StackServiceResponse>(); for (StackServiceRequest request : requests) { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); try { Set<StackServiceResponse> stackServices = getStackServices(request); for (StackServiceResponse stackServiceResponse : stackServices) { stackServiceResponse.setStackName(stackName); stackServiceResponse.setStackVersion(stackVersion); } response.addAll(stackServices); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request. 
// there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<StackServiceResponse> getStackServices(StackServiceRequest request) throws AmbariException { Set<StackServiceResponse> response; String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); if (serviceName != null) { ServiceInfo service = ambariMetaInfo.getService(stackName, stackVersion, serviceName); response = Collections.singleton(new StackServiceResponse(service)); } else { Map<String, ServiceInfo> services = ambariMetaInfo.getServices(stackName, stackVersion); response = new HashSet<StackServiceResponse>(); for (ServiceInfo service : services.values()) { response.add(new StackServiceResponse(service)); } } return response; } @Override public Set<StackConfigurationResponse> getStackLevelConfigurations( Set<StackLevelConfigurationRequest> requests) throws AmbariException { Set<StackConfigurationResponse> response = new HashSet<StackConfigurationResponse>(); for (StackLevelConfigurationRequest request : requests) { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); Set<StackConfigurationResponse> stackConfigurations = getStackLevelConfigurations(request); for (StackConfigurationResponse stackConfigurationResponse : stackConfigurations) { stackConfigurationResponse.setStackName(stackName); stackConfigurationResponse.setStackVersion(stackVersion); } response.addAll(stackConfigurations); } return response; } private Set<StackConfigurationResponse> getStackLevelConfigurations( StackLevelConfigurationRequest request) throws AmbariException { Set<StackConfigurationResponse> response = new HashSet<StackConfigurationResponse>(); String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String propertyName = request.getPropertyName(); Set<PropertyInfo> properties; if (propertyName != null) { properties = 
ambariMetaInfo.getStackPropertiesByName(stackName, stackVersion, propertyName); } else { properties = ambariMetaInfo.getStackProperties(stackName, stackVersion); } for (PropertyInfo property: properties) { response.add(property.convertToResponse()); } return response; } @Override public Set<StackConfigurationResponse> getStackConfigurations( Set<StackConfigurationRequest> requests) throws AmbariException { Set<StackConfigurationResponse> response = new HashSet<StackConfigurationResponse>(); for (StackConfigurationRequest request : requests) { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); Set<StackConfigurationResponse> stackConfigurations = getStackConfigurations(request); for (StackConfigurationResponse stackConfigurationResponse : stackConfigurations) { stackConfigurationResponse.setStackName(stackName); stackConfigurationResponse.setStackVersion(stackVersion); stackConfigurationResponse.setServiceName(serviceName); } response.addAll(stackConfigurations); } return response; } private Set<StackConfigurationResponse> getStackConfigurations( StackConfigurationRequest request) throws AmbariException { Set<StackConfigurationResponse> response = new HashSet<StackConfigurationResponse>(); String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); String propertyName = request.getPropertyName(); Set<PropertyInfo> properties; if (propertyName != null) { properties = ambariMetaInfo.getPropertiesByName(stackName, stackVersion, serviceName, propertyName); } else { properties = ambariMetaInfo.getServiceProperties(stackName, stackVersion, serviceName); } for (PropertyInfo property: properties) { response.add(property.convertToResponse()); } return response; } @Override public Set<StackServiceComponentResponse> getStackComponents( Set<StackServiceComponentRequest> requests) throws AmbariException { 
Set<StackServiceComponentResponse> response = new HashSet<StackServiceComponentResponse>(); for (StackServiceComponentRequest request : requests) { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); try { Set<StackServiceComponentResponse> stackComponents = getStackComponents(request); for (StackServiceComponentResponse stackServiceComponentResponse : stackComponents) { stackServiceComponentResponse.setStackName(stackName); stackServiceComponentResponse.setStackVersion(stackVersion); stackServiceComponentResponse.setServiceName(serviceName); } response.addAll(stackComponents); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request. // there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<StackServiceComponentResponse> getStackComponents( StackServiceComponentRequest request) throws AmbariException { Set<StackServiceComponentResponse> response; String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); String componentName = request.getComponentName(); if (componentName != null) { ComponentInfo component = ambariMetaInfo.getComponent(stackName, stackVersion, serviceName, componentName); response = Collections.singleton(new StackServiceComponentResponse( component)); } else { List<ComponentInfo> components = ambariMetaInfo.getComponentsByService(stackName, stackVersion, serviceName); response = new HashSet<StackServiceComponentResponse>(); for (ComponentInfo component: components) { response.add(new StackServiceComponentResponse(component)); } } return response; } @Override public Set<OperatingSystemResponse> getOperatingSystems( Set<OperatingSystemRequest> requests) throws AmbariException { Set<OperatingSystemResponse> response = new HashSet<OperatingSystemResponse>(); for (OperatingSystemRequest request : requests) 
{ try { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); Set<OperatingSystemResponse> stackOperatingSystems = getOperatingSystems(request); for (OperatingSystemResponse operatingSystemResponse : stackOperatingSystems) { if (operatingSystemResponse.getStackName() == null) { operatingSystemResponse.setStackName(stackName); } if (operatingSystemResponse.getStackVersion() == null) { operatingSystemResponse.setStackVersion(stackVersion); } } response.addAll(stackOperatingSystems); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request. // there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<OperatingSystemResponse> getOperatingSystems( OperatingSystemRequest request) throws AmbariException { Set<OperatingSystemResponse> responses = new HashSet<OperatingSystemResponse>(); String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String osType = request.getOsType(); Long repositoryVersionId = request.getRepositoryVersionId(); String versionDefinitionId = request.getVersionDefinitionId(); // !!! 
when asking for OperatingSystem responses for a versionDefinition, it is either for
  // an established repo version (a Long) OR from the in-memory generated ones (a String)
  // NOTE(review): the lines above/below are the tail of a method whose signature appears
  // earlier in the file (presumably getOperatingSystems) — it resolves OS responses from
  // either a persisted repository version, an in-memory version definition, or the stack.
  if (null == repositoryVersionId && null != versionDefinitionId) {
    if (NumberUtils.isDigits(versionDefinitionId)) {
      repositoryVersionId = Long.valueOf(versionDefinitionId);
    }
  }

  if (repositoryVersionId != null) {
    // Persisted repository version: build one response per OS stored with the entity.
    final RepositoryVersionEntity repositoryVersion = repositoryVersionDAO.findByPK(repositoryVersionId);
    if (repositoryVersion != null) {
      for (OperatingSystemEntity operatingSystem : repositoryVersion.getOperatingSystems()) {
        final OperatingSystemResponse response = new OperatingSystemResponse(operatingSystem.getOsType());
        // When the caller addressed the repo via a versionDefinitionId, echo the id back
        // through that field; otherwise report the numeric repository version id.
        if (null != versionDefinitionId) {
          response.setVersionDefinitionId(repositoryVersionId.toString());
        } else {
          response.setRepositoryVersionId(repositoryVersionId);
        }
        response.setStackName(repositoryVersion.getStackName());
        response.setStackVersion(repositoryVersion.getStackVersion());
        response.setAmbariManagedRepos(operatingSystem.isAmbariManagedRepos());
        responses.add(response);
      }
    }
  } else if (null != versionDefinitionId) {
    // In-memory (not yet persisted) version definition: read the OS list from the XML.
    VersionDefinitionXml xml = ambariMetaInfo.getVersionDefinition(versionDefinitionId);

    if (null == xml) {
      throw new AmbariException(String.format("Version identified by %s does not exist",
          versionDefinitionId));
    }
    StackId stackId = new StackId(xml.release.stackId);

    for (RepositoryXml.Os os : xml.repositoryInfo.getOses()) {
      OperatingSystemResponse resp = new OperatingSystemResponse(os.getFamily());
      resp.setVersionDefinitionId(versionDefinitionId);
      resp.setStackName(stackId.getStackName());
      resp.setStackVersion(stackId.getStackVersion());

      responses.add(resp);
    }
  } else {
    // Fall back to the stack metadata: a single OS if osType was given, otherwise all.
    if (osType != null) {
      OperatingSystemInfo operatingSystem = ambariMetaInfo.getOperatingSystem(stackName, stackVersion, osType);
      responses = Collections.singleton(operatingSystem.convertToResponse());
    } else {
      Set<OperatingSystemInfo> operatingSystems = ambariMetaInfo.getOperatingSystems(stackName, stackVersion);
      for (OperatingSystemInfo operatingSystem : operatingSystems) {
        responses.add(operatingSystem.convertToResponse());
      }
    }
  }

  return responses;
  }

  /**
   * Returns the name of the currently authenticated user, falling back to the
   * configured anonymous audit name when no authentication is present.
   */
  @Override
  public String getAuthName() {
    return AuthorizationHelper.getAuthenticatedName(configs.getAnonymousAuditName());
  }

  /**
   * Resolves a batch of root-service requests into responses.
   *
   * @param requests the root service requests to resolve
   * @return the union of responses for all requests
   * @throws AmbariException only when a single request was supplied and it failed;
   *         with multiple requests (OR predicate) individual failures are ignored
   */
  @Override
  public Set<RootServiceResponse> getRootServices(
      Set<RootServiceRequest> requests) throws AmbariException {
    Set<RootServiceResponse> response = new HashSet<RootServiceResponse>();
    for (RootServiceRequest request : requests) {
      try {
        response.addAll(getRootServices(request));
      } catch (AmbariException e) {
        if (requests.size() == 1) {
          // only throw exception if 1 request.
          // there will be > 1 request in case of OR predicate
          throw e;
        }
      }
    }
    return response;
  }

  /** Delegates a single root-service request to the response factory. */
  private Set<RootServiceResponse> getRootServices(RootServiceRequest request)
      throws AmbariException {
    return rootServiceResponseFactory.getRootServices(request);
  }

  /**
   * Resolves a batch of root-service-component requests, stamping each response
   * with the service name from its originating request.
   *
   * @param requests the component requests to resolve
   * @return the union of responses for all requests
   * @throws AmbariException only when a single request was supplied and it failed
   */
  @Override
  public Set<RootServiceComponentResponse> getRootServiceComponents(
      Set<RootServiceComponentRequest> requests) throws AmbariException {
    Set<RootServiceComponentResponse> response = new HashSet<RootServiceComponentResponse>();
    for (RootServiceComponentRequest request : requests) {
      String serviceName = request.getServiceName();
      try {
        Set<RootServiceComponentResponse> rootServiceComponents = getRootServiceComponents(request);

        for (RootServiceComponentResponse serviceComponentResponse : rootServiceComponents) {
          serviceComponentResponse.setServiceName(serviceName);
        }

        response.addAll(rootServiceComponents);
      } catch (AmbariException e) {
        if (requests.size() == 1) {
          // only throw exception if 1 request.
          // there will be > 1 request in case of OR predicate
          throw e;
        }
      }
    }
    return response;
  }

  /** Delegates a single root-service-component request to the response factory. */
  private Set<RootServiceComponentResponse> getRootServiceComponents(
      RootServiceComponentRequest request) throws AmbariException {
    return rootServiceResponseFactory.getRootServiceComponents(request);
  }

  /** @return the injected {@link Clusters} facade */
  @Override
  public Clusters getClusters() {
    return clusters;
  }

  /** @return the injected {@link ConfigHelper} */
  @Override
  public ConfigHelper getConfigHelper() {
    return configHelper;
  }

  /** @return the injected {@link AmbariMetaInfo} (stack metadata access) */
  @Override
  public AmbariMetaInfo getAmbariMetaInfo() {
    return ambariMetaInfo;
  }

  /** @return the injected {@link ServiceFactory} */
  @Override
  public ServiceFactory getServiceFactory() {
    return serviceFactory;
  }

  /** @return the injected {@link ServiceComponentFactory} */
  @Override
  public ServiceComponentFactory getServiceComponentFactory() {
    return serviceComponentFactory;
  }

  /** @return the injected {@link ConfigGroupFactory} */
  @Override
  public ConfigGroupFactory getConfigGroupFactory() {
    return configGroupFactory;
  }

  /** @return the injected {@link RoleGraphFactory} */
  @Override
  public RoleGraphFactory getRoleGraphFactory() {
    return roleGraphFactory;
  }

  /** @return the injected root-service response factory */
  @Override
  public AbstractRootServiceResponseFactory getRootServiceResponseFactory() {
    return rootServiceResponseFactory;
  }

  /** @return the injected {@link ActionManager} */
  @Override
  public ActionManager getActionManager() {
    return actionManager;
  }

  /** @return the server-relative URL agents use to download the JDK resources */
  @Override
  public String getJdkResourceUrl() {
    return jdkResourceUrl;
  }

  /** @return the configured JAVA_HOME path */
  @Override
  public String getJavaHome() {
    return javaHome;
  }

  /** @return the configured JDK archive name */
  @Override
  public String getJDKName() {
    return jdkName;
  }

  /** @return the configured JCE policy archive name */
  @Override
  public String getJCEName() {
    return jceName;
  }

  /** @return the configured server database name */
  @Override
  public String getServerDB() {
    return serverDB;
  }

  /** @return the URL agents use to download the Oracle JDBC driver */
  @Override
  public String getOjdbcUrl() {
    return ojdbcUrl;
  }

  /** @return the URL agents use to download the MySQL JDBC driver */
  @Override
  public String getMysqljdbcUrl() {
    return mysqljdbcUrl;
  }

  /**
   * Builds the RCA (root cause analysis) database connection parameters sent to
   * agents: URL (with the hostname macro expanded via the hosts map), driver,
   * username and password.
   */
  @Override
  public Map<String, String> getRcaParameters() {
    String hostName = StageUtils.getHostName();

    String url = configs.getRcaDatabaseUrl();
    if (url.contains(Configuration.HOSTNAME_MACRO)) {
      // Substitute this server's (possibly remapped) hostname into the JDBC URL.
      url = url.replace(Configuration.HOSTNAME_MACRO, hostsMap.getHostMap(hostName));
    }

    Map<String, String> rcaParameters = new HashMap<String, String>();

    rcaParameters.put(AMBARI_DB_RCA_URL, url);
    rcaParameters.put(AMBARI_DB_RCA_DRIVER, configs.getRcaDatabaseDriver());
    rcaParameters.put(AMBARI_DB_RCA_USERNAME, configs.getRcaDatabaseUser());
    rcaParameters.put(AMBARI_DB_RCA_PASSWORD, configs.getRcaDatabasePassword());

    return rcaParameters;
  }

  /** @return {@code true} if LDAP integration is enabled */
  @Override
  public boolean checkLdapConfigured() {
    return ldapDataPopulator.isLdapEnabled();
  }

  /** @return details of the LDAP synchronization state from the data populator */
  @Override
  public LdapSyncDto getLdapSyncInfo() throws AmbariException {
    return ldapDataPopulator.getLdapSyncInfo();
  }

  /** @return {@code true} while an LDAP sync is running (see the flag set below) */
  @Override
  public boolean isLdapSyncInProgress() {
    return ldapSyncInProgress;
  }

  /**
   * Synchronizes users and/or groups with LDAP according to the requested sync
   * type (ALL, EXISTING, or SPECIFIC principals), then applies the collected
   * batch to the local user store.
   * <p>
   * Declared {@code synchronized} so only one sync runs at a time; the
   * {@code ldapSyncInProgress} flag is maintained in a try/finally so it is
   * cleared even if the sync fails.
   *
   * @param userRequest  user sync request, or {@code null} to skip users
   * @param groupRequest group sync request, or {@code null} to skip groups
   * @return the batch of changes that was applied
   */
  @Override
  public synchronized LdapBatchDto synchronizeLdapUsersAndGroups(
      LdapSyncRequest userRequest, LdapSyncRequest groupRequest)
      throws AmbariException {
    ldapSyncInProgress = true;
    try {
      final LdapBatchDto batchInfo = new LdapBatchDto();

      if (userRequest != null) {
        switch (userRequest.getType()) {
          case ALL:
            ldapDataPopulator.synchronizeAllLdapUsers(batchInfo);
            break;
          case EXISTING:
            ldapDataPopulator.synchronizeExistingLdapUsers(batchInfo);
            break;
          case SPECIFIC:
            ldapDataPopulator.synchronizeLdapUsers(userRequest.getPrincipalNames(), batchInfo);
            break;
        }
      }
      if (groupRequest != null) {
        switch (groupRequest.getType()) {
          case ALL:
            ldapDataPopulator.synchronizeAllLdapGroups(batchInfo);
            break;
          case EXISTING:
            ldapDataPopulator.synchronizeExistingLdapGroups(batchInfo);
            break;
          case SPECIFIC:
            ldapDataPopulator.synchronizeLdapGroups(groupRequest.getPrincipalNames(), batchInfo);
            break;
        }
      }

      users.processLdapSync(batchInfo);
      return batchInfo;
    } finally {
      ldapSyncInProgress = false;
    }
  }

  /**
   * Loads the widget descriptor JSON for a service (or, when {@code service} is
   * {@code null}, the cluster-level descriptor from the stack) and creates the
   * corresponding widgets and layouts for the cluster.
   *
   * @param cluster the cluster to create widgets/layouts for
   * @param service the service whose widgets to load, or {@code null} for
   *                cluster-level widgets
   * @throws AmbariException if the descriptor cannot be read or parsed, or if
   *                         persisting the artifacts fails
   */
  @SuppressWarnings("unchecked")
  @Override
  public void initializeWidgetsAndLayouts(Cluster cluster, Service service)
      throws AmbariException {
    StackId stackId = cluster.getDesiredStackVersion();
    // Descriptor JSON maps a key to a list of layouts; Gson needs the full generic type.
    Type widgetLayoutType = new TypeToken<Map<String, List<WidgetLayout>>>(){}.getType();

    try {
      Map<String, Object> widgetDescriptor = null;
      StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(), stackId.getStackVersion());

      if (service != null) {
        // Service widgets
        ServiceInfo serviceInfo = stackInfo.getService(service.getName());
        File widgetDescriptorFile = serviceInfo.getWidgetsDescriptorFile();
        if (widgetDescriptorFile != null && widgetDescriptorFile.exists()) {
          try {
            // NOTE(review): FileReader is not closed here and uses the platform
            // default charset — presumably acceptable for stack-local files; verify.
            widgetDescriptor = gson.fromJson(new FileReader(widgetDescriptorFile), widgetLayoutType);
          } catch (Exception ex) {
            String msg = "Error loading widgets from file: " + widgetDescriptorFile;
            LOG.error(msg, ex);
            throw new AmbariException(msg);
          }
        }
      } else {
        // Cluster level widgets
        String widgetDescriptorFileLocation = stackInfo.getWidgetsDescriptorFileLocation();
        if (widgetDescriptorFileLocation != null) {
          File widgetDescriptorFile = new File(widgetDescriptorFileLocation);
          if (widgetDescriptorFile.exists()) {
            try {
              widgetDescriptor = gson.fromJson(new FileReader(widgetDescriptorFile), widgetLayoutType);
            } catch (Exception ex) {
              String msg = "Error loading widgets from file: " + widgetDescriptorFile;
              LOG.error(msg, ex);
              throw new AmbariException(msg);
            }
          }
        }
      }

      if (widgetDescriptor != null) {
        LOG.debug("Loaded widget descriptor: " + widgetDescriptor);
        for (Object artifact : widgetDescriptor.values()) {
          List<WidgetLayout> widgetLayouts = (List<WidgetLayout>) artifact;
          createWidgetsAndLayouts(cluster, widgetLayouts);
        }
      }
    } catch (Exception e) {
      throw new AmbariException("Error creating stack widget artifacts. " +
          (service != null ? "Service: " + service.getName() + ", " : "") +
          "Cluster: " + cluster.getClusterName(), e);
    }
  }

  /**
   * Creates a {@link WidgetEntity} for the given layout info unless one with the
   * same name/user/section already exists for the cluster.
   *
   * @param layoutInfo    source widget definition from the stack descriptor
   * @param clusterEntity the owning cluster entity
   * @param user          author recorded on the widget
   * @param createTime    creation timestamp to record
   * @return the new (possibly not-yet-persisted) entity, or {@code null} when a
   *         matching widget already exists
   */
  private WidgetEntity addIfNotExistsWidgetEntity(WidgetLayoutInfo layoutInfo,
      ClusterEntity clusterEntity, String user, long createTime) {
    List<WidgetEntity> createdEntities =
        widgetDAO.findByName(clusterEntity.getClusterId(), layoutInfo.getWidgetName(),
            user, layoutInfo.getDefaultSectionName());

    if (createdEntities == null || createdEntities.isEmpty()) {
      WidgetEntity widgetEntity = new WidgetEntity();
      widgetEntity.setClusterId(clusterEntity.getClusterId());
      widgetEntity.setClusterEntity(clusterEntity);
      widgetEntity.setScope(WidgetResourceProvider.SCOPE.CLUSTER.name());
      widgetEntity.setWidgetName(layoutInfo.getWidgetName());
      widgetEntity.setDefaultSectionName(layoutInfo.getDefaultSectionName());
      widgetEntity.setAuthor(user);
      widgetEntity.setDescription(layoutInfo.getDescription());
      widgetEntity.setTimeCreated(createTime);
      widgetEntity.setWidgetType(layoutInfo.getType());
      widgetEntity.setMetrics(gson.toJson(layoutInfo.getMetricsInfo()));
      widgetEntity.setProperties(gson.toJson(layoutInfo.getProperties()));
      widgetEntity.setWidgetValues(gson.toJson(layoutInfo.getValues()));
      widgetEntity.setListWidgetLayoutUserWidgetEntity(new LinkedList<WidgetLayoutUserWidgetEntity>());
      LOG.info("Creating cluster widget with: name = " +
          layoutInfo.getWidgetName() + ", type = " + layoutInfo.getType() + ", " +
          "cluster = " + clusterEntity.getClusterName());
      // Persisting not visible widgets
      // visible one will be cascaded on creation of layout
      if (!layoutInfo.isVisible()) {
        widgetDAO.create(widgetEntity);
      }
      return widgetEntity;
    } else {
      LOG.warn("Skip creating widget from stack artifact since one or more " +
          "already exits with name = " + layoutInfo.getWidgetName() + ", " +
          "clusterId = " + clusterEntity.getClusterId() + ", user = " + user);
    }
    return null;
  }

  /**
   * Creates new widget layouts (and their widgets) for the cluster, or appends
   * newly discovered widgets to layouts that already exist for the "ambari" user.
   * Runs in a single transaction.
   *
   * @param cluster       the target cluster
   * @param widgetLayouts layouts parsed from the stack widget descriptor;
   *                      {@code null} is tolerated and treated as a no-op
   */
  @Transactional
  void createWidgetsAndLayouts(Cluster cluster, List<WidgetLayout> widgetLayouts) {
    String user = "ambari";
    Long clusterId = cluster.getClusterId();
    ClusterEntity clusterEntity = clusterDAO.findById(clusterId);
    if (clusterEntity == null) {
      return;
    }
    Long now = System.currentTimeMillis();

    if (widgetLayouts != null) {
      for (WidgetLayout widgetLayout : widgetLayouts) {
        List<WidgetLayoutEntity> existingEntities =
            widgetLayoutDAO.findByName(clusterId, widgetLayout.getLayoutName(), user);
        // Update layout properties if the layout exists
        if (existingEntities == null || existingEntities.isEmpty()) {
          // No existing layout: create it from scratch with all visible widgets.
          WidgetLayoutEntity layoutEntity = new WidgetLayoutEntity();
          layoutEntity.setClusterEntity(clusterEntity);
          layoutEntity.setClusterId(clusterId);
          layoutEntity.setLayoutName(widgetLayout.getLayoutName());
          layoutEntity.setDisplayName(widgetLayout.getDisplayName());
          layoutEntity.setSectionName(widgetLayout.getSectionName());
          layoutEntity.setScope(WidgetLayoutResourceProvider.SCOPE.CLUSTER.name());
          layoutEntity.setUserName(user);

          List<WidgetLayoutUserWidgetEntity> widgetLayoutUserWidgetEntityList =
              new LinkedList<WidgetLayoutUserWidgetEntity>();
          int order = 0;
          for (WidgetLayoutInfo layoutInfo : widgetLayout.getWidgetLayoutInfoList()) {
            if (layoutInfo.getDefaultSectionName() == null) {
              layoutInfo.setDefaultSectionName(layoutEntity.getSectionName());
            }
            WidgetEntity widgetEntity = addIfNotExistsWidgetEntity(layoutInfo, clusterEntity, user, now);
            // Add to layout if visibility is true and widget was newly added
            if (widgetEntity != null && layoutInfo.isVisible()) {
              WidgetLayoutUserWidgetEntity widgetLayoutUserWidgetEntity = new WidgetLayoutUserWidgetEntity();
              widgetLayoutUserWidgetEntity.setWidget(widgetEntity);
              widgetLayoutUserWidgetEntity.setWidgetOrder(order++);
              widgetLayoutUserWidgetEntity.setWidgetLayout(layoutEntity);
              widgetLayoutUserWidgetEntityList.add(widgetLayoutUserWidgetEntity);
              widgetEntity.getListWidgetLayoutUserWidgetEntity().add(widgetLayoutUserWidgetEntity);
            }
          }
          layoutEntity.setListWidgetLayoutUserWidgetEntity(widgetLayoutUserWidgetEntityList);
          widgetLayoutDAO.createWithFlush(layoutEntity);
        } else {
          if (existingEntities.size() > 1) {
            LOG.warn("Skip updating layout since multiple widget layouts " +
                "found with: name = " + widgetLayout.getLayoutName() + ", " +
                "user = " + user + ", cluster = " + cluster.getClusterName());
          } else {
            // Exactly one existing layout: refresh its metadata and append new widgets.
            WidgetLayoutEntity existingLayoutEntity = existingEntities.iterator().next();
            existingLayoutEntity.setSectionName(widgetLayout.getSectionName());
            existingLayoutEntity.setDisplayName(widgetLayout.getDisplayName());
            // Add new widgets to end of the existing ones
            List<WidgetLayoutUserWidgetEntity> layoutUserWidgetEntities =
                existingLayoutEntity.getListWidgetLayoutUserWidgetEntity();
            if (layoutUserWidgetEntities == null) {
              layoutUserWidgetEntities = new LinkedList<WidgetLayoutUserWidgetEntity>();
              existingLayoutEntity.setListWidgetLayoutUserWidgetEntity(layoutUserWidgetEntities);
            }
            // NOTE(review): starting at size() - 1 makes the first appended widget
            // reuse the order value of the current last widget (duplicate order);
            // size() would append strictly after the existing ones — confirm intent.
            int order = layoutUserWidgetEntities.size() - 1;
            List<WidgetLayoutInfo> layoutInfoList = widgetLayout.getWidgetLayoutInfoList();
            if (layoutInfoList != null && !layoutInfoList.isEmpty()) {
              for (WidgetLayoutInfo layoutInfo : layoutInfoList) {
                WidgetEntity widgetEntity = addIfNotExistsWidgetEntity(layoutInfo, clusterEntity, user, now);
                if (widgetEntity != null && layoutInfo.isVisible()) {
                  WidgetLayoutUserWidgetEntity widgetLayoutUserWidgetEntity = new WidgetLayoutUserWidgetEntity();
                  widgetLayoutUserWidgetEntity.setWidget(widgetEntity);
                  widgetLayoutUserWidgetEntity.setWidgetOrder(order++);
                  widgetLayoutUserWidgetEntity.setWidgetLayout(existingLayoutEntity);
                  layoutUserWidgetEntities.add(widgetLayoutUserWidgetEntity);
                  widgetEntity.getListWidgetLayoutUserWidgetEntity().add(widgetLayoutUserWidgetEntity);
                }
              }
            }
            widgetLayoutDAO.mergeWithFlush(existingLayoutEntity);
          }
        }
      }
    }
  }

  /** @return the injector-provided timeline metric cache provider */
  @Override
  public TimelineMetricCacheProvider getTimelineMetricCacheProvider() {
    return injector.getInstance(TimelineMetricCacheProvider.class);
  }

  /** @return the helper used to enable/disable Kerberos */
  @Override
  public KerberosHelper getKerberosHelper() {
    return kerberosHelper;
  }

  /** @return the injected credential store service */
  @Override
  public CredentialStoreService getCredentialStoreService() {
    return credentialStoreService;
  }

  /**
   * Queries the CredentialStoreService to gather properties about it.
   * <p/>
   * In particular, the details about which storage facilities are available are returned via Boolean
   * properties.
   *
   * @return a map of properties
   */
  public Map<String,String> getCredentialStoreServiceProperties() {
    Map<String,String> properties = new HashMap<String, String>();
    properties.put("storage.persistent",
        String.valueOf(credentialStoreService.isInitialized(CredentialStoreType.PERSISTED)));
    properties.put("storage.temporary",
        String.valueOf(credentialStoreService.isInitialized(CredentialStoreType.TEMPORARY)));
    return properties;
  }

  /**
   * Validates that the authenticated user can set a service's (run-as) user and group.
   * <p/>
   * If the user is authorized to set service users and groups, than this method exits quickly.
   * If the user is not authorized to set service users and groups, then this method verifies that
   * the properties of types USER and GROUP have not been changed. If they have been, an
   * AuthorizationException is thrown.
   *
   * @param cluster the relevant cluster
   * @param request the configuration request
   * @throws AuthorizationException if the user is not authorized to perform this operation
   */
  protected void validateAuthorizationToUpdateServiceUsersAndGroups(Cluster cluster,
      ConfigurationRequest request) throws AuthorizationException {
    // If the authenticated user is not authorized to set service users or groups, make sure the
    // relevant properties are not changed. However, if the user is authorized to set service
    // users and groups, there is nothing to check.
    if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(),
        RoleAuthorization.AMBARI_SET_SERVICE_USERS_GROUPS)) {

      Map<String, String> requestProperties = request.getProperties();
      if (requestProperties != null) {
        Map<PropertyInfo.PropertyType, Set<String>> propertyTypes = cluster.getConfigPropertiesTypes(
            request.getType());

        // Create a composite set of properties to check...
        Set<String> propertiesToCheck = new HashSet<String>();

        Set<String> userProperties = propertyTypes.get(PropertyType.USER);
        if (userProperties != null) {
          propertiesToCheck.addAll(userProperties);
        }

        Set<String> groupProperties = propertyTypes.get(PropertyType.GROUP);
        if (groupProperties != null) {
          propertiesToCheck.addAll(groupProperties);
        }

        // If there are no USER or GROUP type properties, skip the validation check...
        if (!propertiesToCheck.isEmpty()) {
          // Compare each sensitive property against the current desired config;
          // a missing existing config is treated as an empty property map.
          Config existingConfig = cluster.getDesiredConfigByType(request.getType());
          Map<String, String> existingProperties =
              (existingConfig == null) ? null : existingConfig.getProperties();
          if (existingProperties == null) {
            existingProperties = Collections.emptyMap();
          }

          for (String propertyName : propertiesToCheck) {
            String existingProperty = existingProperties.get(propertyName);
            String requestProperty = requestProperties.get(propertyName);

            // If the properties don't match, throw an authorization exception
            // (null-safe comparison: differing nullness also counts as a change).
            if ((existingProperty == null) ? (requestProperty != null)
                : !existingProperty.equals(requestProperty)) {
              throw new AuthorizationException("The authenticated user is not authorized to set service user and groups");
            }
          }
        }
      }
    }
  }
}
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.controller; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_DRIVER; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_PASSWORD; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_URL; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_USERNAME; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CLIENTS_TO_UPDATE_CONFIGS; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_RETRY_ENABLED; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GROUP_LIST; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MAX_DURATION_OF_RETRIES; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.NOT_MANAGED_HDFS_PATH_LIST; import static 
org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_LIST; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_VERSION; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.USER_LIST; import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.VERSION; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.lang.reflect.Type; import java.net.InetAddress; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumMap; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.ClusterNotFoundException; import org.apache.ambari.server.DuplicateResourceException; import org.apache.ambari.server.HostNotFoundException; import org.apache.ambari.server.ObjectNotFoundException; import org.apache.ambari.server.ParentObjectNotFoundException; import org.apache.ambari.server.Role; import org.apache.ambari.server.RoleCommand; import org.apache.ambari.server.ServiceComponentHostNotFoundException; import org.apache.ambari.server.ServiceComponentNotFoundException; 
import org.apache.ambari.server.ServiceNotFoundException; import org.apache.ambari.server.StackAccessException; import org.apache.ambari.server.actionmanager.ActionManager; import org.apache.ambari.server.actionmanager.HostRoleCommand; import org.apache.ambari.server.actionmanager.RequestFactory; import org.apache.ambari.server.actionmanager.Stage; import org.apache.ambari.server.actionmanager.StageFactory; import org.apache.ambari.server.agent.ExecutionCommand; import org.apache.ambari.server.agent.ExecutionCommand.KeyNames; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.configuration.Configuration; import org.apache.ambari.server.configuration.Configuration.DatabaseType; import org.apache.ambari.server.controller.internal.RequestOperationLevel; import org.apache.ambari.server.controller.internal.RequestResourceFilter; import org.apache.ambari.server.controller.internal.RequestStageContainer; import org.apache.ambari.server.controller.internal.URLStreamProvider; import org.apache.ambari.server.controller.internal.WidgetLayoutResourceProvider; import org.apache.ambari.server.controller.internal.WidgetResourceProvider; import org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricCacheProvider; import org.apache.ambari.server.controller.spi.Resource; import org.apache.ambari.server.customactions.ActionDefinition; import org.apache.ambari.server.metadata.ActionMetadata; import org.apache.ambari.server.metadata.RoleCommandOrder; import org.apache.ambari.server.orm.dao.ClusterDAO; import org.apache.ambari.server.orm.dao.ClusterVersionDAO; import org.apache.ambari.server.orm.dao.RepositoryVersionDAO; import org.apache.ambari.server.orm.dao.WidgetDAO; import org.apache.ambari.server.orm.dao.WidgetLayoutDAO; import org.apache.ambari.server.orm.entities.ClusterEntity; import org.apache.ambari.server.orm.entities.ClusterVersionEntity; import org.apache.ambari.server.orm.entities.OperatingSystemEntity; import 
org.apache.ambari.server.orm.entities.RepositoryEntity; import org.apache.ambari.server.orm.entities.RepositoryVersionEntity; import org.apache.ambari.server.orm.entities.WidgetEntity; import org.apache.ambari.server.orm.entities.WidgetLayoutEntity; import org.apache.ambari.server.orm.entities.WidgetLayoutUserWidgetEntity; import org.apache.ambari.server.scheduler.ExecutionScheduleManager; import org.apache.ambari.server.security.authorization.AuthorizationException; import org.apache.ambari.server.security.authorization.AuthorizationHelper; import org.apache.ambari.server.security.authorization.Group; import org.apache.ambari.server.security.authorization.ResourceType; import org.apache.ambari.server.security.authorization.RoleAuthorization; import org.apache.ambari.server.security.authorization.User; import org.apache.ambari.server.security.authorization.Users; import org.apache.ambari.server.security.credential.PrincipalKeyCredential; import org.apache.ambari.server.security.encryption.CredentialStoreService; import org.apache.ambari.server.security.encryption.CredentialStoreType; import org.apache.ambari.server.security.ldap.AmbariLdapDataPopulator; import org.apache.ambari.server.security.ldap.LdapBatchDto; import org.apache.ambari.server.security.ldap.LdapSyncDto; import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException; import org.apache.ambari.server.serveraction.kerberos.KerberosOperationException; import org.apache.ambari.server.stageplanner.RoleGraph; import org.apache.ambari.server.stageplanner.RoleGraphFactory; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.CommandScriptDefinition; import org.apache.ambari.server.state.ComponentInfo; import org.apache.ambari.server.state.Config; import org.apache.ambari.server.state.ConfigFactory; import org.apache.ambari.server.state.ConfigHelper; import org.apache.ambari.server.state.Host; import 
org.apache.ambari.server.state.HostComponentAdminState; import org.apache.ambari.server.state.HostState; import org.apache.ambari.server.state.MaintenanceState; import org.apache.ambari.server.state.OperatingSystemInfo; import org.apache.ambari.server.state.PropertyDependencyInfo; import org.apache.ambari.server.state.PropertyInfo; import org.apache.ambari.server.state.PropertyInfo.PropertyType; import org.apache.ambari.server.state.RepositoryInfo; import org.apache.ambari.server.state.RepositoryVersionState; import org.apache.ambari.server.state.SecurityType; import org.apache.ambari.server.state.Service; import org.apache.ambari.server.state.ServiceComponent; import org.apache.ambari.server.state.ServiceComponentFactory; import org.apache.ambari.server.state.ServiceComponentHost; import org.apache.ambari.server.state.ServiceComponentHostEvent; import org.apache.ambari.server.state.ServiceComponentHostFactory; import org.apache.ambari.server.state.ServiceFactory; import org.apache.ambari.server.state.ServiceInfo; import org.apache.ambari.server.state.ServiceOsSpecific; import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.StackInfo; import org.apache.ambari.server.state.State; import org.apache.ambari.server.state.configgroup.ConfigGroupFactory; import org.apache.ambari.server.state.repository.VersionDefinitionXml; import org.apache.ambari.server.state.scheduler.RequestExecutionFactory; import org.apache.ambari.server.state.stack.RepositoryXml; import org.apache.ambari.server.state.stack.WidgetLayout; import org.apache.ambari.server.state.stack.WidgetLayoutInfo; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStopEvent; import org.apache.ambari.server.state.svccomphost.ServiceComponentHostUpgradeEvent; import 
org.apache.ambari.server.utils.SecretReference; import org.apache.ambari.server.utils.StageUtils; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.math.NumberUtils; import org.apache.http.client.utils.URIBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Singleton; import com.google.inject.persist.Transactional; @Singleton public class AmbariManagementControllerImpl implements AmbariManagementController { private final static Logger LOG = LoggerFactory.getLogger(AmbariManagementControllerImpl.class); /** * Property name of request context. 
*/ private static final String REQUEST_CONTEXT_PROPERTY = "context"; private static final String CLUSTER_PHASE_PROPERTY = "phase"; private static final String CLUSTER_PHASE_INITIAL_INSTALL = "INITIAL_INSTALL"; private static final String CLUSTER_PHASE_INITIAL_START = "INITIAL_START"; private static final String BASE_LOG_DIR = "/tmp/ambari"; private final Clusters clusters; private final ActionManager actionManager; private final Injector injector; private final Gson gson; @Inject private ServiceFactory serviceFactory; @Inject private ServiceComponentFactory serviceComponentFactory; @Inject private ServiceComponentHostFactory serviceComponentHostFactory; @Inject private ConfigFactory configFactory; @Inject private StageFactory stageFactory; @Inject private RequestFactory requestFactory; @Inject private ActionMetadata actionMetadata; @Inject private AmbariMetaInfo ambariMetaInfo; @Inject private Users users; @Inject private HostsMap hostsMap; @Inject private Configuration configs; @Inject private AbstractRootServiceResponseFactory rootServiceResponseFactory; @Inject private RoleGraphFactory roleGraphFactory; @Inject private ConfigGroupFactory configGroupFactory; @Inject private ConfigHelper configHelper; @Inject private RequestExecutionFactory requestExecutionFactory; @Inject private ExecutionScheduleManager executionScheduleManager; @Inject private AmbariLdapDataPopulator ldapDataPopulator; @Inject private RepositoryVersionDAO repositoryVersionDAO; @Inject private WidgetDAO widgetDAO; @Inject private WidgetLayoutDAO widgetLayoutDAO; @Inject private ClusterDAO clusterDAO; @Inject private CredentialStoreService credentialStoreService; @Inject private ClusterVersionDAO clusterVersionDAO; private MaintenanceStateHelper maintenanceStateHelper; /** * The KerberosHelper to help setup for enabling for disabling Kerberos */ private KerberosHelper kerberosHelper; final private String masterHostname; final private Integer masterPort; final private String masterProtocol; final 
private static String JDK_RESOURCE_LOCATION = "/resources/";

  // Timeouts (ms) for probing repository URLs.
  // NOTE(review): not referenced in this visible region - presumably consumed
  // elsewhere in the class; verify before removing.
  final private static int REPO_URL_CONNECT_TIMEOUT = 3000;
  final private static int REPO_URL_READ_TIMEOUT = 2000;

  // URLs/names of JDK, JCE and JDBC resources served by this Ambari server;
  // all derived from Configuration in the constructor (null when configs is null).
  final private String jdkResourceUrl;
  final private String javaHome;
  final private String jdkName;
  final private String jceName;
  final private String ojdbcUrl;
  final private String serverDB;
  final private String mysqljdbcUrl;

  // NOTE(review): not referenced in this visible region - verify usage elsewhere.
  private boolean ldapSyncInProgress;

  // Short-lived cache of cluster update responses (entries expire 5 minutes
  // after being written).
  private Cache<ClusterRequest, ClusterResponse> clusterUpdateCache =
      CacheBuilder.newBuilder().expireAfterWrite(5, TimeUnit.MINUTES).build();

  @Inject
  private AmbariCustomCommandExecutionHelper customCommandExecutionHelper;
  @Inject
  private AmbariActionExecutionHelper actionExecutionHelper;

  /**
   * Wires the controller together, resolves this server's canonical hostname,
   * and derives the master endpoint plus JDK/JDBC resource URLs from the
   * server Configuration when one is present (all endpoint-derived fields stay
   * null otherwise).
   */
  @Inject
  public AmbariManagementControllerImpl(ActionManager actionManager,
      Clusters clusters, Injector injector) throws Exception {
    this.clusters = clusters;
    this.actionManager = actionManager;
    this.injector = injector;
    injector.injectMembers(this);
    gson = injector.getInstance(Gson.class);
    LOG.info("Initializing the AmbariManagementControllerImpl");
    masterHostname = InetAddress.getLocalHost().getCanonicalHostName();
    maintenanceStateHelper = injector.getInstance(MaintenanceStateHelper.class);
    kerberosHelper = injector.getInstance(KerberosHelper.class);
    if(configs != null) {
      // Pick protocol/port based on whether API SSL is enabled.
      if (configs.getApiSSLAuthentication()) {
        masterProtocol = "https";
        masterPort = configs.getClientSSLApiPort();
      } else {
        masterProtocol = "http";
        masterPort = configs.getClientApiPort();
      }
      jdkResourceUrl = getAmbariServerURI(JDK_RESOURCE_LOCATION);
      javaHome = configs.getJavaHome();
      jdkName = configs.getJDKName();
      jceName = configs.getJCEName();
      ojdbcUrl = getAmbariServerURI(JDK_RESOURCE_LOCATION + "/" + configs.getOjdbcJarName());
      mysqljdbcUrl = getAmbariServerURI(JDK_RESOURCE_LOCATION + "/" + configs.getMySQLJarName());
      serverDB = configs.getServerDBName();
    } else {
      // No Configuration available (presumably in tests - TODO confirm):
      // leave every derived field unresolved.
      masterProtocol = null;
      masterPort = null;
      jdkResourceUrl = null;
      javaHome = null;
      jdkName = null;
      jceName = null;
      ojdbcUrl
= null;
      mysqljdbcUrl = null;
      serverDB = null;
    }
  }

  /**
   * Builds an absolute URI for the given path rooted at this server's
   * protocol/host/port. Returns null when the master endpoint is unknown
   * (i.e. the controller was constructed without a Configuration).
   * A '?' in the path splits it into path and query components.
   */
  @Override
  public String getAmbariServerURI(String path) {
    if(masterProtocol==null || masterHostname==null || masterPort==null) {
      return null;
    }

    URIBuilder uriBuilder = new URIBuilder();
    uriBuilder.setScheme(masterProtocol);
    uriBuilder.setHost(masterHostname);
    uriBuilder.setPort(masterPort);

    String[] parts = path.split("\\?");
    if (parts.length > 1) {
      uriBuilder.setPath(parts[0]);
      uriBuilder.setQuery(parts[1]);
    } else {
      uriBuilder.setPath(path);
    }

    return uriBuilder.toString();
  }

  /** Returns a fresh RoleCommandOrder initialized for the given cluster. */
  @Override
  public RoleCommandOrder getRoleCommandOrder(Cluster cluster) {
    RoleCommandOrder rco;
    rco = injector.getInstance(RoleCommandOrder.class);
    rco.initialize(cluster);
    return rco;
  }

  /**
   * Creates a new cluster. Validates the request up front - name present, no
   * explicit id, stack known to the metainfo, and (when given) repository
   * version existing - before anything is persisted.
   */
  @Override
  public void createCluster(ClusterRequest request)
      throws AmbariException {
    if (request.getClusterName() == null
        || request.getClusterName().isEmpty()
        || request.getClusterId() != null) {
      throw new IllegalArgumentException("Cluster name should be provided" +
          " and clusterId should be null");
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("Received a createCluster request"
          + ", clusterName=" + request.getClusterName()
          + ", request=" + request);
    }

    if (request.getStackVersion() == null
        || request.getStackVersion().isEmpty()) {
      throw new IllegalArgumentException("Stack information should be"
          + " provided when creating a cluster");
    }

    StackId stackId = new StackId(request.getStackVersion());
    StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
        stackId.getStackVersion());
    if (stackInfo == null) {
      throw new StackAccessException("stackName=" + stackId.getStackName() +
          ", stackVersion=" + stackId.getStackVersion());
    }

    RepositoryVersionEntity versionEntity = null;

    if (null != request.getRepositoryVersion()) {
      versionEntity = repositoryVersionDAO.findByStackAndVersion(stackId,
          request.getRepositoryVersion());

      if (null == versionEntity) {
        throw new AmbariException(String.format("Tried to create a cluster on version %s, but that version doesn't exist",
request.getRepositoryVersion()));
      }
    }

    // FIXME add support for desired configs at cluster level

    // Validate that every requested host exists before creating anything.
    boolean foundInvalidHosts = false;
    StringBuilder invalidHostsStr = new StringBuilder();
    if (request.getHostNames() != null) {
      for (String hostname : request.getHostNames()) {
        try {
          clusters.getHost(hostname);
        } catch (HostNotFoundException e) {
          if (foundInvalidHosts) {
            invalidHostsStr.append(",");
          }
          foundInvalidHosts = true;
          invalidHostsStr.append(hostname);
        }
      }
    }
    if (foundInvalidHosts) {
      // Report all unknown hosts in one exception.
      throw new HostNotFoundException(invalidHostsStr.toString());
    }

    clusters.addCluster(request.getClusterName(), stackId, request.getSecurityType());
    Cluster c = clusters.getCluster(request.getClusterName());

    if (request.getHostNames() != null) {
      clusters.mapHostsToCluster(request.getHostNames(),
          request.getClusterName());
    }
    // Create cluster widgets and layouts
    initializeWidgetsAndLayouts(c, null);

    if (null != versionEntity) {
      ClusterVersionDAO clusterVersionDAO = injector.getInstance(ClusterVersionDAO.class);

      ClusterVersionEntity clusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(request.getClusterName(), stackId,
          request.getRepositoryVersion());

      if (null == clusterVersion) {
        c.createClusterVersion(stackId, versionEntity.getVersion(), getAuthName(),
            RepositoryVersionState.INIT);
      }
    }
  }

  /**
   * Creates host components for the requests. The entire batch is validated
   * first (authorization, duplicates both against existing state and within
   * the batch, host/cluster membership, single target cluster); only then are
   * any ServiceComponentHost objects persisted.
   */
  @Override
  public synchronized void createHostComponents(Set<ServiceComponentHostRequest> requests)
      throws AmbariException, AuthorizationException {

    if (requests.isEmpty()) {
      LOG.warn("Received an empty requests set");
      return;
    }

    // do all validation checks
    Map<String, Map<String, Map<String, Set<String>>>> hostComponentNames =
        new HashMap<String, Map<String, Map<String, Set<String>>>>();
    Set<String> duplicates = new HashSet<String>();
    for (ServiceComponentHostRequest request : requests) {
      validateServiceComponentHostRequest(request);

      Cluster cluster;
      try {
        cluster = clusters.getCluster(request.getClusterName());
      } catch (ClusterNotFoundException e) {
        throw new ParentObjectNotFoundException(
"Attempted to add a host_component to a cluster which doesn't exist: ", e); } if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.SERVICE_ADD_DELETE_SERVICES,RoleAuthorization.HOST_ADD_DELETE_COMPONENTS))) { throw new AuthorizationException("The authenticated user is not authorized to install service components on to hosts"); } if (StringUtils.isEmpty(request.getServiceName())) { request.setServiceName(findServiceName(cluster, request.getComponentName())); } if (LOG.isDebugEnabled()) { LOG.debug("Received a createHostComponent request" + ", clusterName=" + request.getClusterName() + ", serviceName=" + request.getServiceName() + ", componentName=" + request.getComponentName() + ", hostname=" + request.getHostname() + ", request=" + request); } if (!hostComponentNames.containsKey(request.getClusterName())) { hostComponentNames.put(request.getClusterName(), new HashMap<String, Map<String,Set<String>>>()); } if (!hostComponentNames.get(request.getClusterName()) .containsKey(request.getServiceName())) { hostComponentNames.get(request.getClusterName()).put( request.getServiceName(), new HashMap<String, Set<String>>()); } if (!hostComponentNames.get(request.getClusterName()) .get(request.getServiceName()) .containsKey(request.getComponentName())) { hostComponentNames.get(request.getClusterName()) .get(request.getServiceName()).put(request.getComponentName(), new HashSet<String>()); } if (hostComponentNames.get(request.getClusterName()) .get(request.getServiceName()) .get(request.getComponentName()) .contains(request.getHostname())) { duplicates.add("[clusterName=" + request.getClusterName() + ", hostName=" + request.getHostname() + ", componentName=" +request.getComponentName() +']'); continue; } hostComponentNames.get(request.getClusterName()) .get(request.getServiceName()).get(request.getComponentName()) .add(request.getHostname()); if (request.getDesiredState() != null && !request.getDesiredState().isEmpty()) 
{
        State state = State.valueOf(request.getDesiredState());
        // Only INIT is a legal desired state at creation time.
        if (!state.isValidDesiredState()
            || state != State.INIT) {
          throw new IllegalArgumentException("Invalid desired state"
              + " only INIT state allowed during creation"
              + ", providedDesiredState=" + request.getDesiredState());
        }
      }

      Service s;
      try {
        s = cluster.getService(request.getServiceName());
      } catch (ServiceNotFoundException e) {
        throw new IllegalArgumentException(
            "The service[" + request.getServiceName() + "] associated with the component[" +
            request.getComponentName() + "] doesn't exist for the cluster[" + request.getClusterName() + "]");
      }
      ServiceComponent sc = s.getServiceComponent(
          request.getComponentName());
      setRestartRequiredServices(s, request.getComponentName());

      Host host;
      try {
        host = clusters.getHost(request.getHostname());
      } catch (HostNotFoundException e) {
        throw new ParentObjectNotFoundException(
            "Attempted to add a host_component to a host that doesn't exist: ", e);
      }
      // The target host must already be mapped to the requested cluster.
      Set<Cluster> mappedClusters =
          clusters.getClustersForHost(request.getHostname());
      boolean validCluster = false;
      if (LOG.isDebugEnabled()) {
        LOG.debug("Looking to match host to cluster"
            + ", hostnameViaReg=" + host.getHostName()
            + ", hostname=" + request.getHostname()
            + ", clusterName=" + request.getClusterName()
            + ", hostClusterMapCount=" + mappedClusters.size());
      }
      for (Cluster mappedCluster : mappedClusters) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Host belongs to cluster"
              + ", hostname=" + request.getHostname()
              + ", clusterName=" + mappedCluster.getClusterName());
        }
        if (mappedCluster.getClusterName().equals(
            request.getClusterName())) {
          validCluster = true;
          break;
        }
      }
      if (!validCluster) {
        throw new ParentObjectNotFoundException("Attempted to add a host_component to a host that doesn't exist: " +
            "clusterName=" + request.getClusterName() + ", hostName=" + request.getHostname());
      }
      // A non-null existing SCH means this request duplicates persisted state.
      try {
        ServiceComponentHost sch = sc.getServiceComponentHost(
            request.getHostname());
        if (sch != null) {
          duplicates.add("[clusterName=" +
request.getClusterName() + ", hostName=" + request.getHostname() + ", componentName=" +request.getComponentName() +']'); } } catch (AmbariException e) { // Expected } } // ensure only a single cluster update if (hostComponentNames.size() != 1) { throw new IllegalArgumentException("Invalid arguments - updates allowed" + " on only one cluster at a time"); } if (!duplicates.isEmpty()) { StringBuilder names = new StringBuilder(); boolean first = true; for (String hName : duplicates) { if (!first) { names.append(","); } first = false; names.append(hName); } String msg; if (duplicates.size() == 1) { msg = "Attempted to create a host_component which already exists: "; } else { msg = "Attempted to create host_component's which already exist: "; } throw new DuplicateResourceException(msg + names.toString()); } // set restartRequired flag for monitoring services setMonitoringServicesRestartRequired(requests); // now doing actual work persistServiceComponentHosts(requests); } void persistServiceComponentHosts(Set<ServiceComponentHostRequest> requests) throws AmbariException { Multimap<Cluster, ServiceComponentHost> schMap = ArrayListMultimap.create(); for (ServiceComponentHostRequest request : requests) { Cluster cluster = clusters.getCluster(request.getClusterName()); Service s = cluster.getService(request.getServiceName()); ServiceComponent sc = s.getServiceComponent( request.getComponentName()); ServiceComponentHost sch = serviceComponentHostFactory.createNew(sc, request.getHostname()); if (request.getDesiredState() != null && !request.getDesiredState().isEmpty()) { State state = State.valueOf(request.getDesiredState()); sch.setDesiredState(state); } sch.setDesiredStackVersion(sc.getDesiredStackVersion()); schMap.put(cluster, sch); } for (Cluster cluster : schMap.keySet()) { cluster.addServiceComponentHosts(schMap.get(cluster)); } } private void setMonitoringServicesRestartRequired( Set<ServiceComponentHostRequest> requests) throws AmbariException { for 
(ServiceComponentHostRequest request : requests) {
      Cluster cluster = clusters.getCluster(request.getClusterName());
      StackId stackId = cluster.getCurrentStackVersion();

      Collection<String> monitoringServices = ambariMetaInfo.getMonitoringServiceNames(
          stackId.getStackName(), stackId.getStackVersion());

      for (String serviceName : monitoringServices) {
        if (cluster.getServices().containsKey(serviceName)) {
          Service service = cluster.getService(serviceName);

          for (ServiceComponent sc : service.getServiceComponents().values()) {
            if (sc.isMasterComponent()) {
              // Master monitoring component: flag every host instance, then
              // move on to the next component.
              for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
                sch.setRestartRequired(true);
              }
              continue;
            }

            // Non-master component: only flag the instance (if any) on the
            // host named in this request.
            String hostname = request.getHostname();
            if (sc.getServiceComponentHosts().containsKey(hostname)) {
              ServiceComponentHost sch = sc.getServiceComponentHost(hostname);
              sch.setRestartRequired(true);
            }
          }
        }
      }
    }
  }

  /**
   * Flags every host component of the given service restart-required when the
   * stack lists the service as restart-required. No-op when the triggering
   * component is a client component.
   */
  private void setRestartRequiredServices(
      Service service, String componentName) throws AmbariException {

    Cluster cluster = service.getCluster();
    StackId stackId = cluster.getCurrentStackVersion();
    if (service.getServiceComponent(componentName).isClientComponent()) {
      return;
    }
    Set<String> needRestartServices = ambariMetaInfo.getRestartRequiredServicesNames(
        stackId.getStackName(), stackId.getStackVersion());

    if(needRestartServices.contains(service.getName())) {
      Map<String, ServiceComponent> m = service.getServiceComponents();
      for (Entry<String, ServiceComponent> entry : m.entrySet()) {
        ServiceComponent serviceComponent = entry.getValue();
        Map<String, ServiceComponentHost> schMap = serviceComponent.getServiceComponentHosts();

        for (Entry<String, ServiceComponentHost> sch : schMap.entrySet()) {
          ServiceComponentHost serviceComponentHost = sch.getValue();
          serviceComponentHost.setRestartRequired(true);
        }
      }
    }
  }

  /**
   * Marks every component of every rack-sensitive service in the cluster
   * restart-required (called after a host's rack information changes).
   */
  @Override
  public void registerRackChange(String clusterName) throws AmbariException {
    Cluster cluster = clusters.getCluster(clusterName);
    StackId stackId = cluster.getCurrentStackVersion();

    Set<String>
rackSensitiveServices =
        ambariMetaInfo.getRackSensitiveServicesNames(stackId.getStackName(), stackId.getStackVersion());

    Map<String, Service> services = cluster.getServices();

    for (Service service : services.values()) {
      if(rackSensitiveServices.contains(service.getName())) {
        Map<String, ServiceComponent> serviceComponents = service.getServiceComponents();
        for (ServiceComponent serviceComponent : serviceComponents.values()) {
          Map<String, ServiceComponentHost> schMap = serviceComponent.getServiceComponentHosts();
          for (Entry<String, ServiceComponentHost> sch : schMap.entrySet()) {
            ServiceComponentHost serviceComponentHost = sch.getValue();
            serviceComponentHost.setRestartRequired(true);
          }
        }
      }
    }
  }

  /**
   * Creates a new configuration version of the requested type/tag for a
   * cluster. Authorization depends on whether the config type maps to a
   * service (SERVICE_MODIFY_CONFIGS) or to the cluster itself
   * (CLUSTER_MODIFY_CONFIGS).
   */
  @Override
  public synchronized ConfigurationResponse createConfiguration(
      ConfigurationRequest request) throws AmbariException, AuthorizationException {
    if (null == request.getClusterName() || request.getClusterName().isEmpty()
        || null == request.getType() || request.getType().isEmpty()
        || null == request.getProperties()) {
      throw new IllegalArgumentException("Invalid Arguments,"
          + " clustername, config type and configs should not"
          + " be null or empty");
    }

    Cluster cluster = clusters.getCluster(request.getClusterName());

    String configType = request.getType();

    // If the config type is for a service, then allow a user with SERVICE_MODIFY_CONFIGS to
    // update, else ensure the user has CLUSTER_MODIFY_CONFIGS
    String service = null;

    try {
      service = cluster.getServiceForConfigTypes(Collections.singleton(configType));
    } catch (IllegalArgumentException e) {
      // Ignore this since we may have hit a config type that spans multiple services. This may
      // happen in unit test cases but should not happen with later versions of stacks.
}

    if(StringUtils.isEmpty(service)) {
      // Cluster-scoped config type.
      if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(),
          EnumSet.of(RoleAuthorization.CLUSTER_MODIFY_CONFIGS))) {
        throw new AuthorizationException("The authenticated user does not have authorization "
            + "to create cluster configurations");
      }
    } else {
      // Service-scoped config type.
      if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(),
          EnumSet.of(RoleAuthorization.SERVICE_MODIFY_CONFIGS))) {
        throw new AuthorizationException("The authenticated user does not have authorization "
            + "to create service configurations");
      }
    }

    Map<String, String> requestProperties = request.getProperties();

    // Resolve any secret references in PASSWORD-typed properties back into the
    // real stored values before the config is persisted.
    Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes = cluster.getConfigPropertiesTypes(request.getType());
    if(propertiesTypes.containsKey(PropertyType.PASSWORD)) {
      for(String passwordProperty : propertiesTypes.get(PropertyType.PASSWORD)) {
        if(requestProperties.containsKey(passwordProperty)) {
          String passwordPropertyValue = requestProperties.get(passwordProperty);
          if (!SecretReference.isSecret(passwordPropertyValue)) {
            continue;
          }
          SecretReference ref = new SecretReference(passwordPropertyValue, cluster);
          String refValue = ref.getValue();
          requestProperties.put(passwordProperty, refValue);
        }
      }
    }

    Map<String, Config> configs = cluster.getConfigsByType(
        request.getType());
    if (null == configs) {
      configs = new HashMap<String, Config>();
    }

    // Configuration attributes are optional.
// If not present, use empty map
    Map<String, Map<String, String>> propertiesAttributes = request.getPropertiesAttributes();
    if (null == propertiesAttributes) {
      propertiesAttributes = new HashMap<String, Map<String,String>>();
    }

    // A version tag may only be used once per config type.
    if (configs.containsKey(request.getVersionTag())) {
      throw new AmbariException(MessageFormat.format("Configuration with tag ''{0}'' exists for ''{1}''",
          request.getVersionTag(),
          request.getType()));
    }

    handleGlobalsBackwardsCompability(request, propertiesAttributes);

    Config config = createConfig(cluster, request.getType(), requestProperties,
        request.getVersionTag(), propertiesAttributes);

    return new ConfigurationResponse(cluster.getClusterName(), config);
  }

  /**
   * Backwards compatibility for the legacy "global" config type: deprecated
   * globals are moved into their new config types, and for each affected type
   * a new tagged version is created and made the cluster's desired config.
   */
  private void handleGlobalsBackwardsCompability(ConfigurationRequest request,
      Map<String, Map<String, String>> propertiesAttributes) throws AmbariException {
    Cluster cluster = clusters.getCluster(request.getClusterName());
    if(request.getType().equals(Configuration.GLOBAL_CONFIG_TAG)) {
      Map<String, Map<String, String>> configTypes = new HashMap<String, Map<String, String>>();
      configTypes.put(Configuration.GLOBAL_CONFIG_TAG, request.getProperties());
      configHelper.moveDeprecatedGlobals(cluster.getCurrentStackVersion(), configTypes, cluster.getClusterName());

      for(Map.Entry<String, Map<String, String>> configType : configTypes.entrySet()) {
        String configTypeName = configType.getKey();
        Map<String, String> properties = configType.getValue();

        // The "global" type itself was already handled above.
        if(configTypeName.equals(Configuration.GLOBAL_CONFIG_TAG)) {
          continue;
        }

        // First version of a type is "version1"; later ones are timestamped.
        String tag;
        if(cluster.getConfigsByType(configTypeName) == null) {
          tag = "version1";
        } else {
          tag = "version" + System.currentTimeMillis();
        }

        Config config = createConfig(cluster, configTypeName, properties, tag, propertiesAttributes);

        if (config != null) {
          String authName = getAuthName();

          if (cluster.addDesiredConfig(authName, Collections.singleton(config)) != null) {
            LOG.info("cluster '" + cluster.getClusterName() + "' "
                + "changed by: '" + authName + "'; "
                + "type='" + config.getType() +
"' "
                + "tag='" + config.getTag());
          }
        }
      }
    }
  }

  /**
   * Creates and persists a Config of the given type, attaches it to the
   * cluster, and returns it. The version tag is applied only when non-empty.
   */
  @Override
  public Config createConfig(Cluster cluster, String type, Map<String, String> properties,
                             String versionTag, Map<String, Map<String, String>> propertiesAttributes) {
    Config config = configFactory.createNew(cluster, type,
        properties, propertiesAttributes);

    if (!StringUtils.isEmpty(versionTag)) {
      config.setTag(versionTag);
    }

    config.persist();

    cluster.addConfig(config);

    return config;
  }

  /** Creates each requested user; username and password are both required. */
  @Override
  public void createUsers(Set<UserRequest> requests) throws AmbariException {
    for (UserRequest request : requests) {
      if (null == request.getUsername() || request.getUsername().isEmpty() ||
          null == request.getPassword() || request.getPassword().isEmpty()) {
        throw new AmbariException("Username and password must be supplied.");
      }
      users.createUser(request.getUsername(), request.getPassword(), request.isActive(), request.isAdmin(), false);
    }
  }

  /** Creates each requested group; fails if the group already exists. */
  @Override
  public void createGroups(Set<GroupRequest> requests) throws AmbariException {
    for (GroupRequest request : requests) {
      if (StringUtils.isBlank(request.getGroupName())) {
        throw new AmbariException("Group name must be supplied.");
      }
      final Group group = users.getGroup(request.getGroupName());
      if (group != null) {
        throw new AmbariException("Group already exists.");
      }
      users.createGroup(request.getGroupName());
    }
  }

  /** Adds each requested user to the requested group. */
  @Override
  public void createMembers(Set<MemberRequest> requests) throws AmbariException {
    for (MemberRequest request : requests) {
      if (StringUtils.isBlank(request.getGroupName()) || StringUtils.isBlank(request.getUserName())) {
        throw new AmbariException("Both group name and user name must be supplied.");
      }
      users.addMemberToGroup(request.getGroupName(), request.getUserName());
    }
  }

  /**
   * Returns member responses for the requested groups. A missing group is an
   * error only for a single-request lookup; with multiple requests (an OR
   * predicate) the miss is silently skipped.
   */
  @Override
  public Set<MemberResponse> getMembers(Set<MemberRequest> requests)
      throws AmbariException {
    final Set<MemberResponse> responses = new HashSet<MemberResponse>();
    for (MemberRequest request: requests) {
      LOG.debug("Received a getMembers request, " + request.toString());
      final Group group =
users.getGroup(request.getGroupName());
      if (null == group) {
        if (requests.size() == 1) {
          // only throw exception if there is a single request
          // if there are multiple requests, this indicates an OR predicate
          throw new ObjectNotFoundException("Cannot find group '"
              + request.getGroupName() + "'");
        }
      } else {
        for (User user: users.getGroupMembers(group.getGroupName())) {
          final MemberResponse response = new MemberResponse(group.getGroupName(), user.getUserName());
          responses.add(response);
        }
      }
    }
    return responses;
  }

  /**
   * Replaces the membership of a single group with exactly the users named in
   * the requests: missing users are added, extra current members removed.
   * All requests must target the same group.
   */
  @Override
  @SuppressWarnings("unchecked")
  public synchronized void updateMembers(Set<MemberRequest> requests) throws AmbariException {
    // validate
    String groupName = null;
    for (MemberRequest request: requests) {
      if (groupName != null && !request.getGroupName().equals(groupName)) {
        throw new AmbariException("Can't manage members of different groups in one request");
      }
      groupName = request.getGroupName();
    }
    final List<String> requiredMembers = new ArrayList<String>();
    for (MemberRequest request: requests) {
      if (request.getUserName() != null) {
        requiredMembers.add(request.getUserName());
      }
    }
    final List<String> currentMembers = users.getAllMembers(groupName);
    // Remove members not requested, then add requested members not present.
    for (String user: (Collection<String>) CollectionUtils.subtract(currentMembers, requiredMembers)) {
      users.removeMemberFromGroup(groupName, user);
    }
    for (String user: (Collection<String>) CollectionUtils.subtract(requiredMembers, currentMembers)) {
      users.addMemberToGroup(groupName, user);
    }
  }

  /**
   * Creates a new Stage for the given request id with a per-request log dir.
   * NOTE(review): the log dir is built with File.pathSeparator (':' on POSIX),
   * yielding names like "/tmp/ambari:42" - File.separator looks intended, but
   * agents/scripts may rely on the current form; confirm before changing.
   */
  private Stage createNewStage(long id, Cluster cluster, long requestId,
                               String requestContext, String clusterHostInfo,
                               String commandParamsStage, String hostParamsStage) {
    String logDir = BASE_LOG_DIR + File.pathSeparator + requestId;
    Stage stage =
        stageFactory.createNew(requestId, logDir,
            null == cluster ? null : cluster.getClusterName(),
            null == cluster ?
-1L : cluster.getClusterId(),
            requestContext, clusterHostInfo,
            commandParamsStage, hostParamsStage);
    stage.setStageId(id);
    return stage;
  }

  /**
   * Resolves clusters matching the request: a single cluster by name or id,
   * or all clusters (optionally filtered by stack version). A name/id miss is
   * reported as not-found only to users who may manage clusters; others get a
   * bare AuthorizationException so cluster existence is not leaked.
   */
  private Set<ClusterResponse> getClusters(ClusterRequest request)
      throws AmbariException, AuthorizationException {
    Set<ClusterResponse> response = new HashSet<ClusterResponse>();

    if (LOG.isDebugEnabled()) {
      LOG.debug("Received a getClusters request"
          + ", clusterName=" + request.getClusterName()
          + ", clusterId=" + request.getClusterId()
          + ", stackInfo=" + request.getStackVersion());
    }

    Cluster singleCluster = null;
    try {
      if (request.getClusterName() != null) {
        singleCluster = clusters.getCluster(request.getClusterName());
      } else if (request.getClusterId() != null) {
        singleCluster = clusters.getClusterById(request.getClusterId());
      }
    } catch(ClusterNotFoundException e) {
      // the user shouldn't know the difference between a cluster that does not exist or one that
      // he doesn't have access to.
      if (AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null, RoleAuthorization.AMBARI_ADD_DELETE_CLUSTERS)) {
        throw e;
      } else {
        throw new AuthorizationException();
      }
    }

    if (singleCluster != null) {
      ClusterResponse cr = singleCluster.convertToResponse();
      cr.setDesiredConfigs(singleCluster.getDesiredConfigs());
      cr.setDesiredServiceConfigVersions(singleCluster.getActiveServiceConfigVersions());
      cr.setCredentialStoreServiceProperties(getCredentialStoreServiceProperties());

      // If the user is authorized to view information about this cluster, add it to the response
      // TODO: Uncomment this when the UI doesn't require view access for View-only users.
      // if (AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cr.getResourceId(),
      //     RoleAuthorization.AUTHORIZATIONS_VIEW_CLUSTER)) {
      response.add(cr);
      // }
      // else {
      //   // the user shouldn't know the difference between a cluster that does not exist or one that
      //   // he doesn't have access to.
// throw new AuthorizationException(); // } return response; } Map<String, Cluster> allClusters = clusters.getClusters(); for (Cluster c : allClusters.values()) { if (request.getStackVersion() != null) { if (!request.getStackVersion().equals( c.getDesiredStackVersion().getStackId())) { // skip non matching stack versions continue; } } // TODO: Uncomment this when the UI doesn't require view access for View-only users. // If the user is authorized to view information about this cluster, add it to the response // if (AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, c.getResourceId(), // RoleAuthorization.AUTHORIZATIONS_VIEW_CLUSTER)) { ClusterResponse cr = c.convertToResponse(); cr.setDesiredConfigs(c.getDesiredConfigs()); cr.setDesiredServiceConfigVersions(c.getActiveServiceConfigVersions()); cr.setCredentialStoreServiceProperties(getCredentialStoreServiceProperties()); response.add(cr); // } } StringBuilder builder = new StringBuilder(); if (LOG.isDebugEnabled()) { clusters.debugDump(builder); LOG.debug("Cluster State for cluster " + builder.toString()); } return response; } private Set<ServiceComponentHostResponse> getHostComponents( ServiceComponentHostRequest request) throws AmbariException { LOG.debug("Processing request {}", request); if (request.getClusterName() == null || request.getClusterName().isEmpty()) { IllegalArgumentException e = new IllegalArgumentException("Invalid arguments, cluster name should not be null"); LOG.debug("Cluster not specified in request", e); throw e; } final Cluster cluster; try { cluster = clusters.getCluster(request.getClusterName()); } catch (ClusterNotFoundException e) { LOG.error("Cluster not found ", e); throw new ParentObjectNotFoundException("Parent Cluster resource doesn't exist", e); } if (request.getHostname() != null) { try { if (!clusters.getClustersForHost(request.getHostname()).contains(cluster)) { // case where host exists but not associated with given cluster LOG.error("Host doesn't belong to cluster - " + 
request.getHostname());
        throw new ParentObjectNotFoundException("Parent Host resource doesn't exist",
            new HostNotFoundException(request.getClusterName(), request.getHostname()));
      }
    } catch (HostNotFoundException e) {
      LOG.error("Host not found", e);
      // creating new HostNotFoundException to add cluster name
      throw new ParentObjectNotFoundException("Parent Host resource doesn't exist",
          new HostNotFoundException(request.getClusterName(), request.getHostname()));
    }
  }

  // a component filter without a service filter: resolve the owning service
  // from the stack metadata so the service loop below can be narrowed
  if (request.getComponentName() != null) {
    if (request.getServiceName() == null
        || request.getServiceName().isEmpty()) {
      StackId stackId = cluster.getDesiredStackVersion();
      String serviceName = ambariMetaInfo.getComponentToService(stackId.getStackName(),
          stackId.getStackVersion(), request.getComponentName());
      if (LOG.isDebugEnabled()) {
        LOG.debug("Looking up service name for component"
            + ", componentName=" + request.getComponentName()
            + ", serviceName=" + serviceName
            + ", stackInfo=" + stackId.getStackId());
      }
      if (serviceName == null
          || serviceName.isEmpty()) {
        LOG.error("Unable to find service for component {}", request.getComponentName());
        throw new ServiceComponentHostNotFoundException(
            cluster.getClusterName(), null, request.getComponentName(), request.getHostname());
      }
      request.setServiceName(serviceName);
    }
  }

  // limit the scan to one service when a service filter exists, otherwise scan all
  Set<Service> services = new HashSet<Service>();
  if (request.getServiceName() != null && !request.getServiceName().isEmpty()) {
    services.add(cluster.getService(request.getServiceName()));
  } else {
    services.addAll(cluster.getServices().values());
  }

  Set<ServiceComponentHostResponse> response =
      new HashSet<ServiceComponentHostResponse>();

  // parse optional filters up front; booleans record whether each filter applies
  boolean checkDesiredState = false;
  State desiredStateToCheck = null;
  boolean checkState = false;
  State stateToCheck = null;
  boolean filterBasedConfigStaleness = false;
  boolean staleConfig = true;
  if (request.getStaleConfig() != null) {
    filterBasedConfigStaleness = true;
    staleConfig = "true".equals(request.getStaleConfig().toLowerCase());
  }
  if (request.getDesiredState()
!= null
      && !request.getDesiredState().isEmpty()) {
    desiredStateToCheck = State.valueOf(request.getDesiredState());
    if (!desiredStateToCheck.isValidDesiredState()) {
      throw new IllegalArgumentException("Invalid arguments, invalid desired"
          + " state, desiredState=" + desiredStateToCheck);
    }
    checkDesiredState = true;
  }

  if (!StringUtils.isEmpty(request.getState())) {
    stateToCheck = State.valueOf(request.getState());
    // maybe check should be more wider
    // NOTE(review): State.valueOf throws IllegalArgumentException for unknown
    // names and never returns null, so this null check looks unreachable — confirm intent.
    if (stateToCheck == null) {
      throw new IllegalArgumentException("Invalid arguments, invalid state, State=" + request.getState());
    }
    checkState = true;
  }

  Map<String, Host> hosts = clusters.getHostsForCluster(cluster.getClusterName());

  for (Service s : services) {
    // filter on component name if provided
    Set<ServiceComponent> components = new HashSet<ServiceComponent>();
    if (request.getComponentName() != null) {
      components.add(s.getServiceComponent(request.getComponentName()));
    } else {
      components.addAll(s.getServiceComponents().values());
    }
    for (ServiceComponent sc : components) {
      if (request.getComponentName() != null) {
        if (!sc.getName().equals(request.getComponentName())) {
          continue;
        }
      }
      // filter on hostname if provided
      // filter on desired state if provided
      Map<String, ServiceComponentHost> serviceComponentHostMap =
          sc.getServiceComponentHosts();
      if (request.getHostname() != null) {
        try {
          if (serviceComponentHostMap == null
              || !serviceComponentHostMap.containsKey(request.getHostname())) {
            throw new ServiceComponentHostNotFoundException(cluster.getClusterName(),
                s.getName(), sc.getName(), request.getHostname());
          }
          ServiceComponentHost sch = serviceComponentHostMap.get(request.getHostname());
          if (null == sch) {
            // It's possible that the host was deleted during the time that the request was generated.
continue;
          }

          // apply the optional desired-state / live-state / admin-state filters
          // NOTE(review): this filter chain duplicates the multi-host branch
          // below — candidate for extraction into a shared predicate helper.
          if (checkDesiredState && (desiredStateToCheck != sch.getDesiredState())) {
            continue;
          }
          if (checkState && stateToCheck != sch.getState()) {
            continue;
          }
          if (request.getAdminState() != null) {
            // a component without an admin state matches the empty string
            String stringToMatch =
                sch.getComponentAdminState() == null ? "" : sch.getComponentAdminState().name();
            if (!request.getAdminState().equals(stringToMatch)) {
              continue;
            }
          }

          ServiceComponentHostResponse r = sch.convertToResponse();
          if (null == r
              || (filterBasedConfigStaleness && r.isStaleConfig() != staleConfig)) {
            continue;
          }

          Host host = hosts.get(sch.getHostName());
          if (host == null) {
            throw new HostNotFoundException(cluster.getClusterName(), sch.getHostName());
          }

          r.setMaintenanceState(maintenanceStateHelper.getEffectiveState(sch, host).name());
          response.add(r);
        } catch (ServiceComponentHostNotFoundException e) {
          if (request.getServiceName() == null || request.getComponentName() == null) {
            // Ignore the exception if either the service name or component name are not specified.
            // This is an artifact of how we get host_components and can happen in the case where
            // we get all host_components for a host, for example.
            LOG.debug("Ignoring not specified host_component ", e);
          } else {
            // Otherwise rethrow the exception and let the caller decide if it's an error condition.
            // Logging the exception as debug since this does not necessarily indicate an error
            // condition.
            LOG.debug("ServiceComponentHost not found ", e);
            throw new ServiceComponentHostNotFoundException(cluster.getClusterName(),
                request.getServiceName(), request.getComponentName(), request.getHostname());
          }
        }
      } else {
        // no hostname filter: scan every host this component is mapped to
        for (ServiceComponentHost sch : serviceComponentHostMap.values()) {
          if (null == sch) {
            // It's possible that the host was deleted during the time that the request was generated.
continue;
          }

          // same filter chain as the single-host branch above
          if (checkDesiredState && (desiredStateToCheck != sch.getDesiredState())) {
            continue;
          }
          if (checkState && stateToCheck != sch.getState()) {
            continue;
          }
          if (request.getAdminState() != null) {
            String stringToMatch =
                sch.getComponentAdminState() == null ? "" : sch.getComponentAdminState().name();
            if (!request.getAdminState().equals(stringToMatch)) {
              continue;
            }
          }

          ServiceComponentHostResponse r = sch.convertToResponse();
          if (null == r
              || (filterBasedConfigStaleness && r.isStaleConfig() != staleConfig)) {
            continue;
          }

          Host host = hosts.get(sch.getHostName());
          if (host == null) {
            throw new HostNotFoundException(cluster.getClusterName(), sch.getHostName());
          }

          r.setMaintenanceState(maintenanceStateHelper.getEffectiveState(sch, host).name());
          response.add(r);
        }
      }
    }
  }
  return response;
}

/**
 * Returns the effective maintenance state of the given host component, as
 * computed by the maintenance state helper.
 */
@Override
public MaintenanceState getEffectiveMaintenanceState(ServiceComponentHost sch)
    throws AmbariException {
  return maintenanceStateHelper.getEffectiveState(sch);
}

/**
 * Returns configuration responses for a cluster. When both a type and a
 * version tag are given, the single matching config is returned with its
 * full properties; otherwise all configs of the given type (or all configs)
 * are returned, with properties included only when the request asks for them.
 *
 * @param request the lookup criteria; the cluster name is required
 */
private Set<ConfigurationResponse> getConfigurations(
    ConfigurationRequest request) throws AmbariException {
  if (request.getClusterName() == null) {
    throw new IllegalArgumentException("Invalid arguments, cluster name"
        + " should not be null");
  }
  Cluster cluster = clusters.getCluster(request.getClusterName());

  Set<ConfigurationResponse> responses = new HashSet<ConfigurationResponse>();

  // !!! if only one, then we need full properties
  if (null != request.getType()
      && null != request.getVersionTag()) {
    Config config = cluster.getConfig(request.getType(),
        request.getVersionTag());
    if (null != config) {
      ConfigurationResponse response = new ConfigurationResponse(
          cluster.getClusterName(), config);
      responses.add(response);
    }
  } else {
    // honor the request's choice of including full properties or just metadata
    boolean includeProps = request.includeProperties();
    if (null != request.getType()) {
      // all configs of one type
      Map<String, Config> configs = cluster.getConfigsByType(
          request.getType());

      if (null != configs) {
        for (Entry<String, Config> entry : configs.entrySet()) {
          Config config = entry.getValue();
          ConfigurationResponse response = new ConfigurationResponse(
              cluster.getClusterName(), config.getStackId(),
              request.getType(),
              config.getTag(), entry.getValue().getVersion(),
              includeProps ? config.getProperties() : new HashMap<String, String>(),
              includeProps ? config.getPropertiesAttributes() : new HashMap<String, Map<String,String>>(),
              config.getPropertiesTypes());
          responses.add(response);
        }
      }
    } else {
      // !!! all configuration
      Collection<Config> all = cluster.getAllConfigs();

      for (Config config : all) {
        ConfigurationResponse response = new ConfigurationResponse(
            cluster.getClusterName(), config.getStackId(), config.getType(),
            config.getTag(), config.getVersion(),
            includeProps ? config.getProperties() : new HashMap<String, String>(),
            includeProps ? config.getPropertiesAttributes() : new HashMap<String, Map<String,String>>(),
            config.getPropertiesTypes());
        responses.add(response);
      }
    }
  }

  return responses;
}

/**
 * Applies a set of cluster update requests. Each request is handled by
 * {@link #updateCluster}; KDC administrator credentials smuggled in via
 * session attributes are moved into the credential store first (see the
 * inline TODO). Only the last request's response is returned.
 */
@Override
public synchronized RequestStatusResponse updateClusters(Set<ClusterRequest> requests,
                                                         Map<String, String> requestProperties)
    throws AmbariException, AuthorizationException {

  RequestStatusResponse response = null;

  // We have to allow for multiple requests to account for multiple
  // configuration updates (create multiple configuration resources)...
for (ClusterRequest request : requests) {
    // TODO : Is there ever a real world case where we could have multiple non-null responses?

    // ***************************************************
    // set any session attributes for this cluster request
    Cluster cluster;
    if (request.getClusterId() == null) {
      cluster = clusters.getCluster(request.getClusterName());
    } else {
      cluster = clusters.getClusterById(request.getClusterId());
    }

    if (cluster == null) {
      throw new AmbariException("The cluster may not be null");
    }

    Map<String, Object> sessionAttributes = request.getSessionAttributes();

    // TODO: Once the UI uses the Credential Resource API, remove this block to _clean_ the
    // TODO: session attributes and store any KDC administrator credentials in the secure
    // TODO: credential provider facility.
    // For now, to keep things backwards compatible, get and remove the KDC administrator credentials
    // from the session attributes and store them in the CredentialsProvider. The KDC administrator
    // credentials are prefixed with kdc_admin/. The following attributes are expected, if setting
    // the KDC administrator credentials:
    //    kerberos_admin/principal
    //    kerberos_admin/password
    if((sessionAttributes != null) && !sessionAttributes.isEmpty()) {
      Map<String, Object> cleanedSessionAttributes = new HashMap<String, Object>();
      String principal = null;
      char[] password = null;

      // partition the attributes: credentials are captured, everything else is kept
      for(Map.Entry<String,Object> entry: sessionAttributes.entrySet()) {
        String name = entry.getKey();
        Object value = entry.getValue();
        if ("kerberos_admin/principal".equals(name)) {
          if(value instanceof String) {
            principal = (String)value;
          }
        } else if ("kerberos_admin/password".equals(name)) {
          if(value instanceof String) {
            password = ((String) value).toCharArray();
          }
        } else {
          cleanedSessionAttributes.put(name, value);
        }
      }

      if(principal != null) {
        // The KDC admin principal exists... set the credentials in the credentials store
        credentialStoreService.setCredential(cluster.getClusterName(),
            KerberosHelper.KDC_ADMINISTRATOR_CREDENTIAL_ALIAS,
            new PrincipalKeyCredential(principal, password), CredentialStoreType.TEMPORARY);
      }

      sessionAttributes = cleanedSessionAttributes;
    }
    // TODO: END

    cluster.addSessionAttributes(sessionAttributes);
    //
    // ***************************************************

    response = updateCluster(request, requestProperties);
  }
  return response;
}

/**
 * Applies a single cluster update request. Depending on the request this may:
 * rename the cluster, create and apply new desired configurations, roll back
 * to a previous service config version, set the current stack version, map
 * additional hosts into the cluster, change the provisioning state, and/or
 * toggle the cluster security type (Kerberos), each gated by its own
 * authorization check.
 *
 * @param request           the cluster update to apply (cluster id or name required)
 * @param requestProperties directives accompanying the request (e.g. Kerberos options)
 * @return the status of any stages generated (e.g. by a Kerberos toggle),
 *         or null when no stages were required
 */
private synchronized RequestStatusResponse updateCluster(ClusterRequest request,
                                                         Map<String, String> requestProperties)
    throws AmbariException, AuthorizationException {

  RequestStageContainer requestStageContainer = null;

  if (request.getClusterId() == null
      && (request.getClusterName() == null
      || request.getClusterName().isEmpty())) {
    throw new IllegalArgumentException("Invalid arguments, cluster id or cluster name should not be null");
  }

  LOG.info("Received a updateCluster request"
      + ", clusterId=" + request.getClusterId()
      + ", clusterName=" + request.getClusterName()
      + ", securityType=" + request.getSecurityType()
      + ", request=" + request);

  final Cluster cluster;
  if (request.getClusterId() == null) {
    cluster = clusters.getCluster(request.getClusterName());
  } else {
    cluster = clusters.getClusterById(request.getClusterId());
  }

  // Ensure the user has access to update this cluster
  AuthorizationHelper.verifyAuthorization(ResourceType.CLUSTER, cluster.getResourceId(), RoleAuthorization.AUTHORIZATIONS_UPDATE_CLUSTER);

  //save data to return configurations created
  List<ConfigurationResponse> configurationResponses =
    new LinkedList<ConfigurationResponse>();
  ServiceConfigVersionResponse serviceConfigVersionResponse = null;

  // applying new configs and rolling back to an older version are mutually exclusive
  if (request.getDesiredConfig() != null && request.getServiceConfigVersionRequest() != null) {
    String msg = "Unable to set desired configs and rollback at same time, request = " + request.toString();
    LOG.error(msg);
    throw new IllegalArgumentException(msg);
  }

  // set the new name of the cluster if change is requested
  if (!cluster.getClusterName().equals(request.getClusterName())) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Received cluster name change request from " + cluster.getClusterName() + " to " + request.getClusterName());
    }

    if(!AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null, EnumSet.of(RoleAuthorization.AMBARI_RENAME_CLUSTER))) {
      throw new AuthorizationException("The authenticated user does not have authorization to rename the cluster");
    }

    cluster.setClusterName(request.getClusterName());
  }

  //check if desired configs are available in request and they were changed
  boolean isConfigurationCreationNeeded = false;
  if (request.getDesiredConfig() != null) {
    for (ConfigurationRequest desiredConfig : request.getDesiredConfig()) {
      Map<String, String> requestConfigProperties = desiredConfig.getProperties();

      // processing password properties
      // resolve SECRET references back to the stored password values so the
      // comparison below is against real property values
      if(requestConfigProperties != null && !requestConfigProperties.isEmpty()) {
        Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes = cluster.getConfigPropertiesTypes(
            desiredConfig.getType()
        );
        for (Entry<String, String> property : requestConfigProperties.entrySet()) {
          String propertyName = property.getKey();
          String propertyValue = property.getValue();
          if (propertiesTypes.containsKey(PropertyType.PASSWORD) &&
              propertiesTypes.get(PropertyType.PASSWORD).contains(propertyName)) {
            if (SecretReference.isSecret(propertyValue)) {
              SecretReference ref = new SecretReference(propertyValue, cluster);
              requestConfigProperties.put(propertyName, ref.getValue());
            }
          }
        }
      }

      Map<String,Map<String,String>> requestConfigAttributes = desiredConfig.getPropertiesAttributes();
      Config clusterConfig = cluster.getDesiredConfigByType(desiredConfig.getType());
      Map<String, String> clusterConfigProperties = null;
      Map<String,Map<String,String>> clusterConfigAttributes = null;
      if (clusterConfig != null) {
        clusterConfigProperties = clusterConfig.getProperties();
        clusterConfigAttributes = clusterConfig.getPropertiesAttributes();
        if
(!isAttributeMapsEqual(requestConfigAttributes,
            clusterConfigAttributes)){
          // attributes differ from the current desired config -> must recreate
          isConfigurationCreationNeeded = true;
          break;
        }
      } else {
        // no desired config of this type exists yet -> must create one
        isConfigurationCreationNeeded = true;
        break;
      }

      // !!! by attempting to set fields empty, the user is making a full request
      // an empty property set still forces recreation when the referenced tag differs
      if (requestConfigProperties == null || requestConfigProperties.isEmpty()) {
        Config existingConfig = cluster.getConfig(desiredConfig.getType(), desiredConfig.getVersionTag());
        if (existingConfig != null) {
          if (!StringUtils.equals(existingConfig.getTag(), clusterConfig.getTag())) {
            isConfigurationCreationNeeded = true;
            break;
          }
        }
      }
      if (requestConfigProperties != null && clusterConfigProperties != null) {
        if (requestConfigProperties.size() != clusterConfigProperties.size()) {
          isConfigurationCreationNeeded = true;
          break;
        } else {
          if ( cluster.getServiceByConfigType(clusterConfig.getType()) != null && clusterConfig.getServiceConfigVersions().isEmpty() ) {
            //If there's no service config versions containing this config (except cluster configs), recreate it even if exactly equal
            LOG.warn("Existing desired config doesn't belong to any service config version, " +
                "forcing config recreation, " +
                "clusterName={}, type = {}, tag={}", cluster.getClusterName(), clusterConfig.getType(),
                clusterConfig.getTag());
            isConfigurationCreationNeeded = true;
            break;
          }
          // property-by-property comparison against the current desired config
          for (Entry<String, String> property : requestConfigProperties.entrySet()) {
            if (!StringUtils.equals(property.getValue(), clusterConfigProperties.get(property.getKey()))) {
              isConfigurationCreationNeeded = true;
              break;
            }
          }
        }
      }
    }
  }

  // set or create configuration mapping (and optionally create the map of properties)
  if (isConfigurationCreationNeeded) {
    List<ConfigurationRequest> desiredConfigs = request.getDesiredConfig();

    if (!desiredConfigs.isEmpty()) {
      Set<Config> configs = new HashSet<Config>();
      String note = null;

      for (ConfigurationRequest cr : desiredConfigs) {
        String configType = cr.getType();

        // If the config type is for a service, then allow a user with SERVICE_MODIFY_CONFIGS to
        // update, else ensure the user has CLUSTER_MODIFY_CONFIGS
        String
service = null;

        try {
          service = cluster.getServiceForConfigTypes(Collections.singleton(configType));
        } catch (IllegalArgumentException e) {
          // Ignore this since we may have hit a config type that spans multiple services. This may
          // happen in unit test cases but should not happen with later versions of stacks.
        }

        // cluster-level config types need CLUSTER_MODIFY_CONFIGS;
        // service-owned types need SERVICE_MODIFY_CONFIGS
        if(StringUtils.isEmpty(service)) {
          if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(),
              EnumSet.of(RoleAuthorization.CLUSTER_MODIFY_CONFIGS))) {
            throw new AuthorizationException("The authenticated user does not have authorization to modify cluster configurations");
          }
        } else {
          if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(),
              EnumSet.of(RoleAuthorization.SERVICE_MODIFY_CONFIGS))) {
            throw new AuthorizationException("The authenticated user does not have authorization to modify service configurations");
          }
        }

        if (null != cr.getProperties()) {
          // !!! empty property sets are supported, and need to be able to use
          // previously-defined configs (revert)
          Map<String, Config> all = cluster.getConfigsByType(configType);
          if (null == all ||                              // none set
              !all.containsKey(cr.getVersionTag()) ||     // tag not set
              cr.getProperties().size() > 0) {            // properties to set

            // Ensure the user is allowed to update all properties
            validateAuthorizationToUpdateServiceUsersAndGroups(cluster, cr);

            LOG.info(MessageFormat.format("Applying configuration with tag ''{0}'' to cluster ''{1}''  for configuration type {2}",
                cr.getVersionTag(),
                request.getClusterName(),
                configType));

            cr.setClusterName(cluster.getClusterName());
            configurationResponses.add(createConfiguration(cr));
          }
        }
        note = cr.getServiceConfigVersionNote();
        configs.add(cluster.getConfig(configType, cr.getVersionTag()));
      }
      if (!configs.isEmpty()) {
        String authName = getAuthName();
        serviceConfigVersionResponse = cluster.addDesiredConfig(authName, configs, note);
        if (serviceConfigVersionResponse != null) {
          // audit every applied config to the dedicated "configchange" logger
          Logger logger = LoggerFactory.getLogger("configchange");
          for (Config
config : configs) {
            logger.info("cluster '" + request.getClusterName() + "' "
                + "changed by: '" + authName + "'; "
                + "type='" + config.getType() + "' "
                + "tag='" + config.getTag() + "'");
          }
        }
      }
    }
  }

  StackId currentVersion = cluster.getCurrentStackVersion();
  StackId desiredVersion = cluster.getDesiredStackVersion();

  // Set the current version value if its not already set
  if (currentVersion == null) {
    if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(),
        EnumSet.of(RoleAuthorization.CLUSTER_UPGRADE_DOWNGRADE_STACK))) {
      throw new AuthorizationException("The authenticated user does not have authorization to modify stack version");
    }

    cluster.setCurrentStackVersion(desiredVersion);
  }
  // Stack Upgrade: unlike the workflow for creating a cluster, updating a cluster via the API will not
  // create any ClusterVersionEntity changes because those have to go through the Stack Upgrade process.

  boolean requiresHostListUpdate =
      request.getHostNames() != null && !request.getHostNames().isEmpty();
  if (requiresHostListUpdate) {
    clusters.mapHostsToCluster(
        request.getHostNames(), request.getClusterName());
  }

  // set the provisioning state of the cluster
  // only INIT and INSTALLED are settable, and only via a valid state transition
  if (null != request.getProvisioningState()) {
    State oldProvisioningState = cluster.getProvisioningState();
    State provisioningState = State.valueOf(request.getProvisioningState());

    if (provisioningState != State.INIT
        && provisioningState != State.INSTALLED) {
      LOG.warn(
          "Invalid cluster provisioning state {} cannot be set on the cluster {}",
          provisioningState, request.getClusterName());

      throw new IllegalArgumentException(
          "Invalid cluster provisioning state "
          + provisioningState + " cannot be set on cluster "
          + request.getClusterName());
    }

    if (provisioningState != oldProvisioningState) {
      boolean isStateTransitionValid = State.isValidDesiredStateTransition(
          oldProvisioningState, provisioningState);

      if (!isStateTransitionValid) {
        // NOTE(review): the message below reads "provisioning 2state" — apparent
        // typo for "provisioning state"; confirm and fix in a code change.
        LOG.warn(
            "Invalid cluster provisioning 2state {} cannot be set on the cluster {} because the current state is {}",
            provisioningState, request.getClusterName(), oldProvisioningState);

        throw new AmbariException("Invalid transition for"
            + " cluster provisioning state" + ", clusterName="
            + cluster.getClusterName() + ", clusterId="
            + cluster.getClusterId() + ", currentProvisioningState="
            + oldProvisioningState + ", newProvisioningState="
            + provisioningState);
      }
    }

    cluster.setProvisioningState(provisioningState);
  }

  // roll a service back to a previously recorded service config version
  if (null != request.getServiceConfigVersionRequest()) {
    if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(),
        EnumSet.of(RoleAuthorization.SERVICE_MODIFY_CONFIGS))) {
      throw new AuthorizationException("The authenticated user does not have authorization to modify service configurations");
    }

    ServiceConfigVersionRequest serviceConfigVersionRequest = request.getServiceConfigVersionRequest();
    if (StringUtils.isEmpty(serviceConfigVersionRequest.getServiceName()) ||
        null == serviceConfigVersionRequest.getVersion()) {
      String msg = "Service name and version should be specified in service config version";
      LOG.error(msg);
      throw new IllegalArgumentException(msg);
    }

    serviceConfigVersionResponse = cluster.setServiceConfigVersion(serviceConfigVersionRequest.getServiceName(),
        serviceConfigVersionRequest.getVersion(), getAuthName(),
        serviceConfigVersionRequest.getNote());
  }

  if (serviceConfigVersionResponse != null) {
    if (!configurationResponses.isEmpty()) {
      serviceConfigVersionResponse.setConfigurations(configurationResponses);
    }

    ClusterResponse clusterResponse =
        new ClusterResponse(cluster.getClusterId(), cluster.getClusterName(), null, null, null, null, null, null);

    Map<String, Collection<ServiceConfigVersionResponse>> map =
        new HashMap<String, Collection<ServiceConfigVersionResponse>>();
    map.put(serviceConfigVersionResponse.getServiceName(), Collections.singletonList(serviceConfigVersionResponse));

    clusterResponse.setDesiredServiceConfigVersions(map);

    //workaround to be able to retrieve update results in resource provider
//as this method only expected to return request response saveClusterUpdate(request, clusterResponse); } // set the new security type of the cluster if change is requested SecurityType securityType = request.getSecurityType(); if(securityType != null) { // if any custom operations are valid and requested, the process of executing them should be initiated, // most of the validation logic will be left to the KerberosHelper to avoid polluting the controller if (kerberosHelper.shouldExecuteCustomOperations(securityType, requestProperties)) { if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.CLUSTER_TOGGLE_KERBEROS))) { throw new AuthorizationException("The authenticated user does not have authorization to perform Kerberos-specific operations"); } try { requestStageContainer = kerberosHelper.executeCustomOperations(cluster, requestProperties, requestStageContainer, kerberosHelper.getManageIdentitiesDirective(requestProperties)); } catch (KerberosOperationException e) { throw new IllegalArgumentException(e.getMessage(), e); } } else { // If force_toggle_kerberos is not specified, null will be returned. Therefore, perform an // equals check to yield true if the result is Boolean.TRUE, otherwise false. 
boolean forceToggleKerberos = kerberosHelper.getForceToggleKerberosDirective(requestProperties); if (forceToggleKerberos || (cluster.getSecurityType() != securityType)) { LOG.info("Received cluster security type change request from {} to {} (forced: {})", cluster.getSecurityType().name(), securityType.name(), forceToggleKerberos); if ((securityType == SecurityType.KERBEROS) || (securityType == SecurityType.NONE)) { if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.CLUSTER_TOGGLE_KERBEROS))) { throw new AuthorizationException("The authenticated user does not have authorization to enable or disable Kerberos"); } // Since the security state of the cluster has changed, invoke toggleKerberos to handle // adding or removing Kerberos from the cluster. This may generate multiple stages // or not depending the current state of the cluster. try { requestStageContainer = kerberosHelper.toggleKerberos(cluster, securityType, requestStageContainer, kerberosHelper.getManageIdentitiesDirective(requestProperties)); } catch (KerberosOperationException e) { throw new IllegalArgumentException(e.getMessage(), e); } } else { throw new IllegalArgumentException(String.format("Unexpected security type encountered: %s", securityType.name())); } cluster.setSecurityType(securityType); } } } if (requestStageContainer != null) { requestStageContainer.persist(); return requestStageContainer.getRequestStatusResponse(); } else { return null; } } /** * Comparison of two attributes maps * @param requestConfigAttributes - attribute map sent from API * @param clusterConfigAttributes - existed attribute map * @return true if maps is equal (have the same attributes and their values) */ public boolean isAttributeMapsEqual(Map<String, Map<String, String>> requestConfigAttributes, Map<String, Map<String, String>> clusterConfigAttributes) { boolean isAttributesEqual = true; if ((requestConfigAttributes != null && clusterConfigAttributes == 
null) || (requestConfigAttributes == null && clusterConfigAttributes != null)
        || (requestConfigAttributes != null && clusterConfigAttributes != null
            && !requestConfigAttributes.keySet().equals(clusterConfigAttributes.keySet()))) {
      // Exactly one side is null, or the top-level key sets differ -> not equal.
      return false;
    } else if (clusterConfigAttributes != null && requestConfigAttributes != null) {
      // Both maps are non-null with identical key sets; compare the nested
      // attribute maps key by key.
      for (Entry<String, Map<String, String>> ClusterEntrySet : clusterConfigAttributes.entrySet()) {
        Map<String, String> clusterMapAttributes = ClusterEntrySet.getValue();
        Map<String, String> requestMapAttributes = requestConfigAttributes.get(ClusterEntrySet.getKey());
        if ((requestMapAttributes != null && clusterMapAttributes == null)
            || (requestMapAttributes == null && clusterMapAttributes != null)
            || (requestMapAttributes != null && clusterMapAttributes != null
                && !requestMapAttributes.keySet().equals(clusterMapAttributes.keySet()))) {
          // Nested attribute key sets differ -> not equal.
          return false;
        } else if (requestMapAttributes != null && clusterMapAttributes != null) {
          // Key sets match; compare the individual attribute values.
          for (Entry<String, String> requestPropertyEntrySet : requestMapAttributes.entrySet()) {
            String requestPropertyValue = requestPropertyEntrySet.getValue();
            String clusterPropertyValue = clusterMapAttributes.get(requestPropertyEntrySet.getKey());
            if ((requestPropertyValue != null && clusterPropertyValue == null)
                || (requestPropertyValue == null && clusterPropertyValue != null)
                || (requestPropertyValue != null && clusterPropertyValue != null
                    && !requestPropertyValue.equals(clusterPropertyValue))) {
              return false;
            }
          }
        }
      }
    }
    // NOTE(review): isAttributesEqual is initialized to true and never
    // reassigned, so every non-mismatch path returns true here.
    return isAttributesEqual;
  }

  /**
   * Saves the result of a cluster update so it can be retrieved later via
   * {@link #getClusterUpdateResults(ClusterRequest)}; this works around the
   * update path only returning a request response.
   *
   * @param clusterRequest cluster request info (used as the cache key)
   * @param clusterResponse cluster response info (the cached value)
   */
  public void saveClusterUpdate(ClusterRequest clusterRequest, ClusterResponse clusterResponse) {
    clusterUpdateCache.put(clusterRequest, clusterResponse);
  }

  @Override
  public ClusterResponse getClusterUpdateResults(ClusterRequest clusterRequest) {
    // Returns null when no update result has been cached for this request.
    return clusterUpdateCache.getIfPresent(clusterRequest);
  }

  // Returns a host name carrying the JOBTRACKER component of the MAPREDUCE
  // service, or null when the service/component is absent or has no hosts.
  @Override
  public String getJobTrackerHost(Cluster cluster) {
    try {
      Service svc = cluster.getService("MAPREDUCE");
      ServiceComponent sc = svc.getServiceComponent(Role.JOBTRACKER.toString());
      if (sc.getServiceComponentHosts() != null
          && !sc.getServiceComponentHosts().isEmpty()) {
        // Any mapped host will do; take the first one from the key set.
        return sc.getServiceComponentHosts().keySet().iterator().next();
      }
    } catch (AmbariException ex) {
      // MAPREDUCE service or JOBTRACKER component not present in this cluster.
      return null;
    }
    return null;
  }

  /**
   * Computes the set of service names for which a smoke test (service check)
   * should be run, based on services and host components transitioning to
   * STARTED. Services/components in maintenance mode are skipped.
   *
   * @param cluster the cluster being updated
   * @param changedServices services grouped by their new desired state (may be null)
   * @param changedScHosts host components grouped by component name and new state
   * @param runSmokeTest whether smoke tests were requested at all
   * @return names of services to smoke test
   * @throws AmbariException if a referenced service cannot be resolved
   */
  private Set<String> getServicesForSmokeTests(Cluster cluster,
             Map<State, List<Service>> changedServices,
             Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts,
             boolean runSmokeTest) throws AmbariException {

    // We choose the most general (high-level) op level here. As a result,
    // service checks will be only launched for services/components that
    // are not in a Maintenance state.
    Resource.Type opLvl = Resource.Type.Cluster;

    Set<String> smokeTestServices = new HashSet<String>();

    // Adding smoke checks for changed services
    if (changedServices != null) {
      for (Entry<State, List<Service>> entry : changedServices.entrySet()) {
        if (State.STARTED != entry.getKey()) {
          continue;
        }
        for (Service s : entry.getValue()) {
          if (runSmokeTest && (State.INSTALLED == s.getDesiredState()
              && maintenanceStateHelper.isOperationAllowed(opLvl, s))) {
            smokeTestServices.add(s.getName());
          }
        }
      }
    }

    // Adding smoke checks for changed host components
    // Count how many distinct components are being started per service.
    Map<String, Map<String, Integer>> changedComponentCount =
        new HashMap<String, Map<String, Integer>>();
    for (Map<State, List<ServiceComponentHost>> stateScHostMap :
        changedScHosts.values()) {
      for (Entry<State, List<ServiceComponentHost>> entry : stateScHostMap.entrySet()) {
        if (State.STARTED != entry.getKey()) {
          continue;
        }
        for (ServiceComponentHost sch : entry.getValue()) {
          if (State.INSTALLED != sch.getState()) {
            continue;
          }
          if (!
maintenanceStateHelper.isOperationAllowed(opLvl, sch)) {
            // Host component is in maintenance mode - skip it.
            continue;
          }
          if (!changedComponentCount.containsKey(sch.getServiceName())) {
            changedComponentCount.put(sch.getServiceName(),
                new HashMap<String, Integer>());
          }
          if (!changedComponentCount.get(sch.getServiceName())
              .containsKey(sch.getServiceComponentName())) {
            changedComponentCount.get(sch.getServiceName())
                .put(sch.getServiceComponentName(), 1);
          } else {
            Integer i = changedComponentCount.get(sch.getServiceName())
                .get(sch.getServiceComponentName());
            changedComponentCount.get(sch.getServiceName())
                .put(sch.getServiceComponentName(), ++i);
          }
        }
      }
    }

    // Add service checks for any changed master component hosts or if
    // more than one component has been changed for a service
    for (Entry<String, Map<String, Integer>> entry : changedComponentCount.entrySet()) {
      String serviceName = entry.getKey();
      Service s = cluster.getService(serviceName);
      // smoke test service if more than one component is started
      if (runSmokeTest && (entry.getValue().size() > 1)
          && maintenanceStateHelper.isOperationAllowed(opLvl, s)) {
        smokeTestServices.add(serviceName);
        continue;
      }
      for (String componentName : changedComponentCount.get(serviceName).keySet()) {
        ServiceComponent sc = cluster.getService(serviceName).getServiceComponent(componentName);
        StackId stackId = sc.getDesiredStackVersion();
        ComponentInfo compInfo = ambariMetaInfo.getComponent(
            stackId.getStackName(), stackId.getStackVersion(), serviceName,
            componentName);
        if (runSmokeTest && compInfo.isMaster() &&
            // op lvl handling for service component
            // is the same as for service
            maintenanceStateHelper.isOperationAllowed(opLvl, s)) {
          smokeTestServices.add(serviceName);
        }
        // FIXME if master check if we need to run a smoke test for the master
      }
    }
    return smokeTestServices;
  }

  /**
   * Queues client-only host components of affected services for re-INSTALL so
   * that changed configurations get re-attached on service restart. Affected
   * services are those with a component going to STARTED, plus changed
   * services whose desired state is INSTALLED. Hosts that are heartbeat-lost
   * or in maintenance mode are excluded. Results are merged into
   * {@code changedScHosts} under State.INSTALLED.
   *
   * @param cluster the cluster being updated
   * @param changedServices services grouped by new desired state (may be null)
   * @param changedScHosts host components grouped by component name and new
   *                       state; entries may be added/replaced by this method
   * @throws AmbariException on service/host lookup failures
   */
  private void addClientSchForReinstall(Cluster cluster,
      Map<State, List<Service>> changedServices,
      Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts)
      throws AmbariException {

    Set<String> services = new HashSet<String>();

    // This is done to account for services with client only components.
    if (changedServices != null) {
      for (Entry<State, List<Service>> entry : changedServices.entrySet()) {
        if (State.STARTED != entry.getKey()) {
          continue;
        }
        for (Service s : entry.getValue()) {
          if (State.INSTALLED == s.getDesiredState()) {
            services.add(s.getName());
          }
        }
      }
    }

    // Flatten changed Schs that are going to be Started
    List<ServiceComponentHost> serviceComponentHosts = new ArrayList<ServiceComponentHost>();
    if (changedScHosts != null && !changedScHosts.isEmpty()) {
      for (Entry<String, Map<State, List<ServiceComponentHost>>> stringMapEntry : changedScHosts.entrySet()) {
        for (State state : stringMapEntry.getValue().keySet()) {
          if (state == State.STARTED) {
            serviceComponentHosts.addAll(stringMapEntry.getValue().get(state));
          }
        }
      }
    }

    // Collect the names of all services touched by those SCHs.
    if (!serviceComponentHosts.isEmpty()) {
      for (ServiceComponentHost sch : serviceComponentHosts) {
        services.add(sch.getServiceName());
      }
    }

    if (services.isEmpty()) {
      // Nothing relevant changed - no client re-install needed.
      return;
    }

    // Find eligible client component hosts for each affected service.
    Map<String, List<ServiceComponentHost>> clientSchs = new HashMap<String, List<ServiceComponentHost>>();

    for (String serviceName : services) {
      Service s = cluster.getService(serviceName);
      for (String component : s.getServiceComponents().keySet()) {
        List<ServiceComponentHost> potentialHosts = new ArrayList<ServiceComponentHost>();
        ServiceComponent sc = s.getServiceComponents().get(component);
        if (sc.isClientComponent()) {
          for (ServiceComponentHost potentialSch : sc.getServiceComponentHosts().values()) {
            Host host = clusters.getHost(potentialSch.getHostName());
            // Host is alive and neither host nor SCH is in Maintenance State
            if (!potentialSch.getHostState().equals(HostState.HEARTBEAT_LOST)
                && potentialSch.getMaintenanceState() != MaintenanceState.ON
                && host.getMaintenanceState(cluster.getClusterId()) == MaintenanceState.OFF) {
              potentialHosts.add(potentialSch);
            }
          }
        }
        if (!potentialHosts.isEmpty()) {
          clientSchs.put(sc.getName(), potentialHosts);
        }
      }
    }
    LOG.info("Client hosts for reinstall : " + clientSchs.size());

    // Merge the client SCHs into changedScHosts under INSTALLED, replacing
    // any existing entry for the same component name.
    if (changedScHosts != null) {
      for (Entry<String, List<ServiceComponentHost>> stringListEntry : clientSchs.entrySet()) {
        Map<State, List<ServiceComponentHost>> schMap = new EnumMap<State, List<ServiceComponentHost>>(State.class);
        schMap.put(State.INSTALLED, stringListEntry.getValue());
        changedScHosts.put(stringListEntry.getKey(), schMap);
      }
    }
  }

  @Override
  public Map<String, Map<String,String>> findConfigurationTagsWithOverrides(
          Cluster cluster, String hostName) throws AmbariException {
    // Delegates to ConfigHelper to resolve effective (host-overridden) tags.
    return configHelper.getEffectiveDesiredTags(cluster, hostName);
  }

  @Override
  public RequestExecutionFactory getRequestExecutionFactory() {
    return requestExecutionFactory;
  }

  @Override
  public ExecutionScheduleManager getExecutionScheduleManager() {
    return executionScheduleManager;
  }

  /**
   * Creates and populates an EXECUTION_COMMAND for host
   */
  private void createHostAction(Cluster cluster,
                                Stage stage, ServiceComponentHost scHost,
                                Map<String, Map<String, String>> configurations,
                                Map<String, Map<String, Map<String, String>>> configurationAttributes,
                                Map<String, Map<String, String>> configTags,
                                RoleCommand roleCommand,
                                Map<String, String> commandParamsInp,
                                ServiceComponentHostEvent event
                                )
                                throws AmbariException {

    String
serviceName = scHost.getServiceName();

    stage.addHostRoleExecutionCommand(scHost.getHost(),
        Role.valueOf(scHost.getServiceComponentName()), roleCommand, event,
        cluster, serviceName, false, false);

    String componentName = scHost.getServiceComponentName();
    String hostname = scHost.getHostName();
    String osFamily = clusters.getHost(hostname).getOsFamily();
    StackId stackId = cluster.getDesiredStackVersion();

    // Resolve stack metadata for the service/component being commanded.
    ServiceInfo serviceInfo = ambariMetaInfo.getService(stackId.getStackName(),
        stackId.getStackVersion(), serviceName);
    ComponentInfo componentInfo = ambariMetaInfo.getComponent(
        stackId.getStackName(), stackId.getStackVersion(),
        serviceName, componentName);
    StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
        stackId.getStackVersion());

    ExecutionCommand execCmd = stage.getExecutionCommandWrapper(scHost.getHostName(),
        scHost.getServiceComponentName()).getExecutionCommand();

    Host host = clusters.getHost(scHost.getHostName());

    execCmd.setConfigurations(configurations);
    execCmd.setConfigurationAttributes(configurationAttributes);
    execCmd.setConfigurationTags(configTags);

    // Create a local copy for each command
    Map<String, String> commandParams = new TreeMap<String, String>();
    if (commandParamsInp != null) { // if not defined
      commandParams.putAll(commandParamsInp);
    }

    // Propagate HCFS service type info. The first service that declares a
    // non-null service type wins (loop breaks on the first match).
    Iterator<Service> it = cluster.getServices().values().iterator();
    while(it.hasNext()) {
      ServiceInfo serviceInfoInstance = ambariMetaInfo.getService(stackId.getStackName(),stackId.getStackVersion(), it.next().getName());
      LOG.info("Iterating service type Instance in createHostAction:: " + serviceInfoInstance.getName());
      if(serviceInfoInstance.getServiceType() != null) {
        LOG.info("Adding service type info in createHostAction:: " + serviceInfoInstance.getServiceType());
        commandParams.put("dfs_type",serviceInfoInstance.getServiceType());
        break;
      }
    }

    boolean isInstallCommand = roleCommand.equals(RoleCommand.INSTALL);
    String agentDefaultCommandTimeout = configs.getDefaultAgentTaskTimeout(isInstallCommand);
    String scriptCommandTimeout = "";
    /*
     * This script is only used for
     * default commands like INSTALL/STOP/START
     */
    CommandScriptDefinition script = componentInfo.getCommandScript();
    if (serviceInfo.getSchemaVersion().equals(AmbariMetaInfo.SCHEMA_VERSION_2)) {
      if (script != null) {
        commandParams.put(SCRIPT, script.getScript());
        commandParams.put(SCRIPT_TYPE, script.getScriptType().toString());

        boolean retryEnabled = false;
        Integer retryMaxTime = 0;
        // Command retry only applies during the initial install/start phases
        // of cluster provisioning, driven by cluster-env settings.
        if (commandParams.containsKey(CLUSTER_PHASE_PROPERTY) &&
            (commandParams.get(CLUSTER_PHASE_PROPERTY).equals(CLUSTER_PHASE_INITIAL_INSTALL) ||
            commandParams.get(CLUSTER_PHASE_PROPERTY).equals(CLUSTER_PHASE_INITIAL_START))) {
          String retryEnabledStr =
              configHelper.getValueFromDesiredConfigurations(cluster, ConfigHelper.CLUSTER_ENV,
                  ConfigHelper.CLUSTER_ENV_RETRY_ENABLED);
          String commandsStr =
              configHelper.getValueFromDesiredConfigurations(cluster, ConfigHelper.CLUSTER_ENV,
                  ConfigHelper.CLUSTER_ENV_RETRY_COMMANDS);
          String retryMaxTimeStr =
              configHelper.getValueFromDesiredConfigurations(cluster, ConfigHelper.CLUSTER_ENV,
                  ConfigHelper.CLUSTER_ENV_RETRY_MAX_TIME_IN_SEC);
          if (StringUtils.isNotEmpty(retryEnabledStr)) {
            retryEnabled = Boolean.TRUE.toString().equals(retryEnabledStr);
          }

          if (retryEnabled) {
            // NumberUtils.toInt falls back to 0 on unparsable input.
            retryMaxTime = NumberUtils.toInt(retryMaxTimeStr, 0);
            if (retryMaxTime < 0) {
              // Negative max time is clamped to "no retry window".
              retryMaxTime = 0;
            }

            if (StringUtils.isNotEmpty(commandsStr)) {
              // Retry stays enabled only if this role command is in the
              // comma-separated retryable-commands list.
              boolean commandMayBeRetried = false;
              String[] commands = commandsStr.split(",");
              for (String command : commands) {
                if (roleCommand.toString().equals(command.trim())) {
                  commandMayBeRetried = true;
                }
              }
              retryEnabled = commandMayBeRetried;
            }
          }
        }
        commandParams.put(MAX_DURATION_OF_RETRIES, Integer.toString(retryMaxTime));
        commandParams.put(COMMAND_RETRY_ENABLED, Boolean.toString(retryEnabled));

        ClusterVersionEntity effectiveClusterVersion = cluster.getEffectiveClusterVersion();
        if (effectiveClusterVersion != null) {
          commandParams.put(VERSION, effectiveClusterVersion.getRepositoryVersion().getVersion());
        }
        if (script.getTimeout() > 0) {
          scriptCommandTimeout = String.valueOf(script.getTimeout());
        }
      } else {
        String message = String.format("Component %s of service %s has no " +
            "command script defined", componentName, serviceName);
        throw new AmbariException(message);
      }
    }

    // Prefer the script-specific timeout, falling back to the agent default.
    String actualTimeout = (!scriptCommandTimeout.equals("") ? scriptCommandTimeout : agentDefaultCommandTimeout);

    // Because the INSTALL command can take much longer than typical commands, set the timeout to be the max
    // between the script's service component timeout and the agent default timeout.
    if (roleCommand.equals(RoleCommand.INSTALL) && !agentDefaultCommandTimeout.equals("") &&
        Integer.parseInt(actualTimeout) < Integer.parseInt(agentDefaultCommandTimeout)) {
      actualTimeout = agentDefaultCommandTimeout;
    }

    commandParams.put(COMMAND_TIMEOUT, actualTimeout);
    commandParams.put(SERVICE_PACKAGE_FOLDER,
        serviceInfo.getServicePackageFolder());
    commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());

    String clusterName = cluster.getClusterName();
    if (customCommandExecutionHelper.isTopologyRefreshRequired(roleCommand.name(), clusterName, serviceName)) {
      commandParams.put(ExecutionCommand.KeyNames.REFRESH_TOPOLOGY, "True");
    }

    execCmd.setCommandParams(commandParams);

    String repoInfo = customCommandExecutionHelper.getRepoInfo(cluster, host);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Sending repo information to agent"
          + ", hostname=" + scHost.getHostName()
          + ", clusterName=" + clusterName
          + ", stackInfo=" + stackId.getStackId()
          + ", repoInfo=" + repoInfo);
    }

    Map<String, String> hostParams = new TreeMap<String, String>();
    hostParams.put(REPO_INFO, repoInfo);
    hostParams.putAll(getRcaParameters());

    // use the effective cluster version here since this command might happen
    // in the context of an upgrade and we should send the repo ID which matches
    // the version being sent down
    RepositoryVersionEntity repoVersion = null;
    ClusterVersionEntity effectiveClusterVersion = cluster.getEffectiveClusterVersion();
    if (null != effectiveClusterVersion) {
      repoVersion = effectiveClusterVersion.getRepositoryVersion();
    } else {
      // Fall back to the INIT-state cluster version, but only if unambiguous.
      List<ClusterVersionEntity> list = clusterVersionDAO.findByClusterAndState(cluster.getClusterName(),
          RepositoryVersionState.INIT);
      if (1 == list.size()) {
        repoVersion = list.get(0).getRepositoryVersion();
      }
    }

    if (null != repoVersion) {
      try {
        VersionDefinitionXml xml = repoVersion.getRepositoryXml();
        if (null != xml && !StringUtils.isBlank(xml.release.packageVersion)) {
          hostParams.put(PACKAGE_VERSION, xml.release.packageVersion);
        }
      } catch (Exception e) {
        throw new AmbariException(String.format("Could not load version xml from repo version %s",
            repoVersion.getVersion()), e);
      }

      hostParams.put(KeyNames.REPO_VERSION_ID, repoVersion.getId().toString());
    }

    List<ServiceOsSpecific.Package> packages =
        getPackagesForServiceHost(serviceInfo, hostParams, osFamily);
    String packageList = gson.toJson(packages);
    hostParams.put(PACKAGE_LIST, packageList);

    // Forward user/group/not-managed-HDFS-path property values (as JSON) to
    // the agent as host-level parameters.
    Set<String> userSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.USER, cluster);
    String userList = gson.toJson(userSet);
    hostParams.put(USER_LIST, userList);

    Set<String> groupSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.GROUP, cluster);
    String groupList = gson.toJson(groupSet);
    hostParams.put(GROUP_LIST, groupList);

    Set<String> notManagedHdfsPathSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.NOT_MANAGED_HDFS_PATH, cluster);
    String notManagedHdfsPathList = gson.toJson(notManagedHdfsPathSet);
    hostParams.put(NOT_MANAGED_HDFS_PATH_LIST, notManagedHdfsPathList);

    // Tell the agent which JDBC driver jar to use, if any.
    DatabaseType databaseType = configs.getDatabaseType();
    if (databaseType == DatabaseType.ORACLE) {
      hostParams.put(DB_DRIVER_FILENAME, configs.getOjdbcJarName());
    } else if (databaseType == DatabaseType.MYSQL) {
      hostParams.put(DB_DRIVER_FILENAME, configs.getMySQLJarName());
    }

    List<String> clientsToUpdateConfigsList = componentInfo.getClientsToUpdateConfigs();
    if (clientsToUpdateConfigsList == null) {
      // Default: update configs for all clients ("*").
      clientsToUpdateConfigsList = new ArrayList<String>();
      clientsToUpdateConfigsList.add("*");
    }
    String clientsToUpdateConfigs = gson.toJson(clientsToUpdateConfigsList);
    hostParams.put(CLIENTS_TO_UPDATE_CONFIGS, clientsToUpdateConfigs);
    execCmd.setHostLevelParams(hostParams);

    Map<String, String> roleParams = new TreeMap<String, String>();

    // !!! consistent with where custom commands put variables
    // !!! after-INSTALL hook checks this such that the stack selection tool won't
    // select-all to a version that is not being upgraded, breaking RU
    if (cluster.isUpgradeSuspended()) {
      roleParams.put(KeyNames.UPGRADE_SUSPENDED, Boolean.TRUE.toString().toLowerCase());
    }
    execCmd.setRoleParams(roleParams);

    // NOTE(review): execCmd was already dereferenced above, so the null
    // guard here is redundant.
    if ((execCmd != null) && (execCmd.getConfigurationTags().containsKey("cluster-env"))) {
      LOG.info("AmbariManagementControllerImpl.createHostAction: created ExecutionCommand for host {}, role {}, roleCommand {}, and command ID {}, with cluster-env tags {}",
          execCmd.getHostname(), execCmd.getRole(), execCmd.getRoleCommand(), execCmd.getCommandId(), execCmd.getConfigurationTags().get("cluster-env").get("tag"));
    }
  }

  /**
   * Computes os-dependent packages for service/host. Does not take into
   * account package dependencies for ANY_OS. Instead of this method
   * you should use getPackagesForServiceHost()
   * because it takes into account both os-dependent and os-independent lists
   * of packages for service.
* @param hostParams may be modified (appended SERVICE_REPO_INFO)
   * @return a list of os-dependent packages for host
   */
  protected ServiceOsSpecific populateServicePackagesInfo(ServiceInfo serviceInfo, Map<String, String> hostParams,
                                                          String osFamily) {
    ServiceOsSpecific hostOs = new ServiceOsSpecific(osFamily);
    List<ServiceOsSpecific> foundOSSpecifics = getOSSpecificsByFamily(serviceInfo.getOsSpecifics(), osFamily);
    if (!foundOSSpecifics.isEmpty()) {
      for (ServiceOsSpecific osSpecific : foundOSSpecifics) {
        // Merge the package lists of every matching os-specific section.
        hostOs.addPackages(osSpecific.getPackages());
      }
      // Choose repo that is relevant for host
      ServiceOsSpecific.Repo serviceRepo = hostOs.getRepo();
      if (serviceRepo != null) {
        String serviceRepoInfo = gson.toJson(serviceRepo);
        hostParams.put(SERVICE_REPO_INFO, serviceRepoInfo);
      }
    }
    return hostOs;
  }

  /**
   * Builds the full package list for a service on a host: os-independent
   * (ANY_OS) packages plus the os-dependent ones resolved via
   * {@link #populateServicePackagesInfo}.
   */
  @Override
  public List<ServiceOsSpecific.Package> getPackagesForServiceHost(ServiceInfo serviceInfo, Map<String, String> hostParams, String osFamily) {

    // Write down os specific info for the service
    ServiceOsSpecific anyOs = null;
    if (serviceInfo.getOsSpecifics().containsKey(AmbariMetaInfo.ANY_OS)) {
      anyOs = serviceInfo.getOsSpecifics().get(AmbariMetaInfo.ANY_OS);
    }

    ServiceOsSpecific hostOs = populateServicePackagesInfo(serviceInfo, hostParams, osFamily);

    // Build package list that is relevant for host
    List<ServiceOsSpecific.Package> packages =
        new ArrayList<ServiceOsSpecific.Package>();
    if (anyOs != null) {
      packages.addAll(anyOs.getPackages());
    }

    if (hostOs != null) {
      packages.addAll(hostOs.getPackages());
    }

    return packages;
  }

  /**
   * Returns the os-specific sections whose key contains the given os family.
   * Note this is a substring match, so one key may cover several families.
   */
  private List<ServiceOsSpecific> getOSSpecificsByFamily(Map<String, ServiceOsSpecific> osSpecifics, String osFamily) {
    List<ServiceOsSpecific> foundedOSSpecifics = new ArrayList<ServiceOsSpecific>();
    for (Entry<String, ServiceOsSpecific> osSpecific : osSpecifics.entrySet()) {
      if (osSpecific.getKey().contains(osFamily)) {
        foundedOSSpecifics.add(osSpecific.getValue());
      }
    }
    return foundedOSSpecifics;
  }

  /**
   * Translates an ExecuteActionRequest into an ActionExecutionContext, either
   * for a custom command or for a named action definition.
   *
   * @throws AmbariException if the referenced action definition does not exist
   */
  private ActionExecutionContext getActionExecutionContext
      (ExecuteActionRequest actionRequest) throws AmbariException {
    RequestOperationLevel operationLevel = actionRequest.getOperationLevel();
    if (actionRequest.isCommand()) {
      // Custom command: no action definition lookup required.
      ActionExecutionContext actionExecutionContext =
          new ActionExecutionContext(actionRequest.getClusterName(),
              actionRequest.getCommandName(), actionRequest.getResourceFilters(),
              actionRequest.getParameters());
      actionExecutionContext.setOperationLevel(operationLevel);
      return actionExecutionContext;
    } else { // If action

      ActionDefinition actionDef =
          ambariMetaInfo.getActionDefinition(actionRequest.getActionName());

      if (actionDef == null) {
        throw new AmbariException(
            "Action " + actionRequest.getActionName() + " does not exist");
      }

      ActionExecutionContext actionExecutionContext =
          new ActionExecutionContext(actionRequest.getClusterName(),
              actionRequest.getActionName(), actionRequest.getResourceFilters(),
              actionRequest.getParameters(), actionDef.getTargetType(),
              actionDef.getDefaultTimeout(), actionDef.getTargetService(),
              actionDef.getTargetComponent());
      actionExecutionContext.setOperationLevel(operationLevel);
      return actionExecutionContext;
    }
  }

  private RequestStageContainer doStageCreation(RequestStageContainer requestStages,
      Cluster cluster,
      Map<State, List<Service>> changedServices,
      Map<State, List<ServiceComponent>> changedComps,
      Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts,
      Map<String, String> requestParameters,
      Map<String, String> requestProperties,
      boolean runSmokeTest,
      boolean reconfigureClients)
      throws AmbariException {

    // TODO handle different transitions?
    // Say HDFS to stopped and MR to started, what order should actions be done
    // in?

    // TODO additional validation?
  // verify all configs
  // verify all required components

  // Nothing changed — no stages to create
  if ((changedServices == null || changedServices.isEmpty())
      && (changedComps == null || changedComps.isEmpty())
      && (changedScHosts == null || changedScHosts.isEmpty())) {
    LOG.debug("Created 0 stages");
    return requestStages;
  }

  // smoke test any service that goes from installed to started
  Set<String> smokeTestServices = getServicesForSmokeTests(cluster,
      changedServices, changedScHosts, runSmokeTest);

  if (reconfigureClients) {
    // Re-install client only hosts to reattach changed configs on service
    // restart
    addClientSchForReinstall(cluster, changedServices, changedScHosts);
  }

  if (!changedScHosts.isEmpty()
      || !smokeTestServices.isEmpty()) {
    long nowTimestamp = System.currentTimeMillis();

    // FIXME cannot work with a single stage
    // multiple stages may be needed for reconfigure
    Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);

    String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
    String hostParamsJson = StageUtils.getGson().toJson(
        customCommandExecutionHelper.createDefaultHostParams(cluster));

    Stage stage = createNewStage(requestStages.getLastStageId(), cluster,
        requestStages.getId(), requestProperties.get(REQUEST_CONTEXT_PROPERTY),
        clusterHostInfoJson, "{}", hostParamsJson);

    // Components that transition INIT/INSTALL_FAILED -> INSTALLED on a Kerberized
    // cluster, collected so identities can be ensured after stages are built
    Collection<ServiceComponentHost> componentsToEnableKerberos = new ArrayList<ServiceComponentHost>();
    Set<String> hostsToForceKerberosOperations = new HashSet<String>();

    for (String compName : changedScHosts.keySet()) {
      for (State newState : changedScHosts.get(compName).keySet()) {
        for (ServiceComponentHost scHost : changedScHosts.get(compName).get(newState)) {

          // Do not create role command for hosts that are not responding
          if (scHost.getHostState().equals(HostState.HEARTBEAT_LOST)) {
            LOG.info("Command is not created for servicecomponenthost "
                + ", clusterName=" + cluster.getClusterName()
                + ", clusterId=" + cluster.getClusterId()
                + ", serviceName=" + scHost.getServiceName()
                + ", componentName=" + scHost.getServiceComponentName()
                + ", hostname=" + scHost.getHostName()
                + ", hostState=" + scHost.getHostState()
                + ", targetNewState=" + newState);
            continue;
          }

          RoleCommand roleCommand;
          State oldSchState = scHost.getState();
          ServiceComponentHostEvent event;

          // Map (currentState, desiredState) to the role command + lifecycle event
          switch (newState) {
            case INSTALLED:
              if (oldSchState == State.INIT
                  || oldSchState == State.UNINSTALLED
                  || oldSchState == State.INSTALLED
                  || oldSchState == State.INSTALLING
                  || oldSchState == State.UNKNOWN
                  || oldSchState == State.INSTALL_FAILED) {
                roleCommand = RoleCommand.INSTALL;
                event = new ServiceComponentHostInstallEvent(
                    scHost.getServiceComponentName(), scHost.getHostName(),
                    nowTimestamp,
                    scHost.getDesiredStackVersion().getStackId());

                // If the state is transitioning from INIT TO INSTALLED and the cluster has Kerberos
                // enabled, mark this ServiceComponentHost to see if anything needs to be done to
                // make sure it is properly configured.
                // If the component is transitioning from an INSTALL_FAILED to an INSTALLED state
                // indicates a failure attempt on install
                // followed by a new installation attempt and will also need Kerberos related
                // configuration addressing
                // The Kerberos-related stages needs to be
                // between the INSTALLED and STARTED states because some services need to set up
                // the host (i.e. create user accounts, etc...) before Kerberos-related tasks can
                // occur (like distribute keytabs)
                if ((oldSchState == State.INIT || oldSchState == State.INSTALL_FAILED) &&
                    kerberosHelper.isClusterKerberosEnabled(cluster)) {

                  // check if host component already exists, if it exists no need to reset kerberos configs
                  // check if it's blueprint install. If it is, then do not call kerberos.configureService
                  if (!hostComponentAlreadyExists(cluster, scHost) && !("INITIAL_INSTALL".equals(requestProperties.get("phase")))) {
                    try {
                      kerberosHelper.configureService(cluster, scHost);
                    } catch (KerberosInvalidConfigurationException e) {
                      // Surface Kerberos config problems as a standard AmbariException, keeping the cause
                      throw new AmbariException(e.getMessage(), e);
                    }
                  }

                  componentsToEnableKerberos.add(scHost);

                  if (Service.Type.KERBEROS.name().equalsIgnoreCase(scHost.getServiceName()) &&
                      Role.KERBEROS_CLIENT.name().equalsIgnoreCase(scHost.getServiceComponentName())) {
                    // Since the KERBEROS/KERBEROS_CLIENT is about to be moved from the INIT to the
                    // INSTALLED state (and it should be by the time the stages (in this request)
                    // that need to be execute), collect the relevant hostname to make sure the
                    // Kerberos logic does not skip operations for it.
                    hostsToForceKerberosOperations.add(scHost.getHostName());
                  }
                }
              } else if (oldSchState == State.STARTED
                  // TODO: oldSchState == State.INSTALLED is always false, looks like a bug
                  //|| oldSchState == State.INSTALLED
                  || oldSchState == State.STOPPING) {
                roleCommand = RoleCommand.STOP;
                event = new ServiceComponentHostStopEvent(
                    scHost.getServiceComponentName(), scHost.getHostName(),
                    nowTimestamp);
              } else if (oldSchState == State.UPGRADING) {
                roleCommand = RoleCommand.UPGRADE;
                event = new ServiceComponentHostUpgradeEvent(
                    scHost.getServiceComponentName(), scHost.getHostName(),
                    nowTimestamp, scHost.getDesiredStackVersion().getStackId());
              } else {
                throw new AmbariException("Invalid transition for"
                    + " servicecomponenthost"
                    + ", clusterName=" + cluster.getClusterName()
                    + ", clusterId=" + cluster.getClusterId()
                    + ", serviceName=" + scHost.getServiceName()
                    + ", componentName=" + scHost.getServiceComponentName()
                    + ", hostname=" + scHost.getHostName()
                    + ", currentState=" + oldSchState
                    + ", newDesiredState=" + newState);
              }
              break;
            case STARTED:
              StackId stackId = scHost.getDesiredStackVersion();
              ComponentInfo compInfo = ambariMetaInfo.getComponent(
                  stackId.getStackName(), stackId.getStackVersion(), scHost.getServiceName(),
                  scHost.getServiceComponentName());

              if (oldSchState == State.INSTALLED ||
                  oldSchState == State.STARTING ||
                  //todo: after separating install and start, the install stage is no longer in request stage container
                  //todo: so projected state will not equal INSTALLED which causes an exception for invalid state transition
                  //todo: so for now disabling this check
                  //todo: this change breaks test AmbariManagementControllerTest.testServiceComponentHostUpdateRecursive()
                  true) {
                //  requestStages.getProjectedState(scHost.getHostName(),
                //      scHost.getServiceComponentName()) == State.INSTALLED) {
                roleCommand = RoleCommand.START;
                event = new ServiceComponentHostStartEvent(
                    scHost.getServiceComponentName(), scHost.getHostName(),
                    nowTimestamp);
              } else {
                String error = "Invalid transition for"
                    + " servicecomponenthost"
                    + ", clusterName=" + cluster.getClusterName()
                    + ", clusterId=" + cluster.getClusterId()
                    + ", serviceName=" + scHost.getServiceName()
                    + ", componentName=" + scHost.getServiceComponentName()
                    + ", hostname=" + scHost.getHostName()
                    + ", currentState=" + oldSchState
                    + ", newDesiredState=" + newState;
                // Masters must fail hard; slaves/clients are skipped with a log entry
                if (compInfo.isMaster()) {
                  throw new AmbariException(error);
                } else {
                  LOG.info("Ignoring: " + error);
                  continue;
                }
              }
              break;
            case UNINSTALLED:
              if (oldSchState == State.INSTALLED
                  || oldSchState == State.UNINSTALLING) {
                roleCommand = RoleCommand.UNINSTALL;
                // NOTE(review): a *Start* event is raised for the UNINSTALL command, unlike the
                // other branches which use matching event types — confirm the state machine
                // intentionally accepts this pairing
                event = new ServiceComponentHostStartEvent(
                    scHost.getServiceComponentName(), scHost.getHostName(),
                    nowTimestamp);
              } else {
                throw new AmbariException("Invalid transition for"
                    + " servicecomponenthost"
                    + ", clusterName=" + cluster.getClusterName()
                    + ", clusterId=" + cluster.getClusterId()
                    + ", serviceName=" + scHost.getServiceName()
                    + ", componentName=" + scHost.getServiceComponentName()
                    + ", hostname=" + scHost.getHostName()
                    + ", currentState=" + oldSchState
                    + ", newDesiredState=" + newState);
              }
              break;
            case INIT:
              throw new AmbariException("Unsupported transition to INIT for"
                  + " servicecomponenthost"
                  + ", clusterName=" + cluster.getClusterName()
                  + ", clusterId=" + cluster.getClusterId()
                  + ", serviceName=" + scHost.getServiceName()
                  + ", componentName=" + scHost.getServiceComponentName()
                  + ", hostname=" + scHost.getHostName()
                  + ", currentState=" + oldSchState
                  + ", newDesiredState=" + newState);
            default:
              throw new AmbariException("Unsupported state change operation"
                  + ", newState=" + newState.toString());
          }

          if (LOG.isDebugEnabled()) {
            LOG.debug("Create a new host action"
                + ", requestId=" + requestStages.getId()
                + ", componentName=" + scHost.getServiceComponentName()
                + ", hostname=" + scHost.getHostName()
                + ", roleCommand=" + roleCommand.name());
          }

          // any targeted information
          String keyName = scHost.getServiceComponentName().toLowerCase();
          if (requestProperties.containsKey(keyName)) {
            // in the case where the command is targeted, but the states
            // of the old and new are the same, the targeted component
            // may still need to get the command. This is true for Flume.
            if (oldSchState == newState) {
              switch (oldSchState) {
                case INSTALLED:
                  roleCommand = RoleCommand.STOP;
                  event = new ServiceComponentHostStopEvent(
                      scHost.getServiceComponentName(), scHost.getHostName(),
                      nowTimestamp);
                  break;
                case STARTED:
                  roleCommand = RoleCommand.START;
                  event = new ServiceComponentHostStartEvent(
                      scHost.getServiceComponentName(), scHost.getHostName(),
                      nowTimestamp);
                  break;
                default:
                  break;
              }
            }

            if (null == requestParameters) {
              requestParameters = new HashMap<String, String>();
            }
            requestParameters.put(keyName, requestProperties.get(keyName));
          }

          // Propagate the cluster phase (e.g. blueprint provisioning phase) to the command
          if (requestProperties.containsKey(CLUSTER_PHASE_PROPERTY)) {
            if (null == requestParameters) {
              requestParameters = new HashMap<String, String>();
            }
            requestParameters.put(CLUSTER_PHASE_PROPERTY, requestProperties.get(CLUSTER_PHASE_PROPERTY));
          }

          Map<String, Map<String, String>> configurations = new TreeMap<String, Map<String, String>>();
          Map<String, Map<String, Map<String, String>>> configurationAttributes = new TreeMap<String, Map<String, Map<String, String>>>();
          Host host = clusters.getHost(scHost.getHostName());

          Map<String, Map<String, String>> configTags =
              findConfigurationTagsWithOverrides(cluster, host.getHostName());

          createHostAction(cluster, stage, scHost, configurations, configurationAttributes, configTags,
              roleCommand, requestParameters, event);
        }
      }
    }

    for (String serviceName : smokeTestServices) { // Creates smoke test commands
      Service s = cluster.getService(serviceName);
      // find service component host
      ServiceComponent component = getClientComponentForRunningAction(cluster, s);
      String componentName = component != null ? component.getName() : null;
      String clientHost = getClientHostForRunningAction(cluster, s, component);
      String smokeTestRole = actionMetadata.getServiceCheckAction(serviceName);

      if (clientHost == null || smokeTestRole == null) {
        LOG.info("Nothing to do for service check as could not find role or"
            + " or host to run check on"
            + ", clusterName=" + cluster.getClusterName()
            + ", serviceName=" + serviceName
            + ", clientHost=" + clientHost
            + ", serviceCheckRole=" + smokeTestRole);
        continue;
      }

      customCommandExecutionHelper.addServiceCheckAction(stage, clientHost, smokeTestRole,
          nowTimestamp, serviceName, componentName, null, false, false);
    }

    // Order the generated commands per the stack's role command order and split
    // them into dependency-respecting stages
    RoleCommandOrder rco = getRoleCommandOrder(cluster);
    RoleGraph rg = roleGraphFactory.createNew(rco);
    rg.build(stage);
    requestStages.addStages(rg.getStages());

    if (!componentsToEnableKerberos.isEmpty()) {
      Map<String, Collection<String>> serviceFilter = new HashMap<String, Collection<String>>();
      Set<String> hostFilter = new HashSet<String>();

      // Build service->components and host filters for the Kerberos identity work
      for (ServiceComponentHost scHost : componentsToEnableKerberos) {
        String serviceName = scHost.getServiceName();
        Collection<String> componentFilter = serviceFilter.get(serviceName);

        if (componentFilter == null) {
          componentFilter = new HashSet<String>();
          serviceFilter.put(serviceName, componentFilter);
        }

        componentFilter.add(scHost.getServiceComponentName());
        hostFilter.add(scHost.getHostName());
      }

      try {
        kerberosHelper.ensureIdentities(cluster, serviceFilter, hostFilter, null,
            hostsToForceKerberosOperations, requestStages,
            kerberosHelper.getManageIdentitiesDirective(requestProperties));
      } catch (KerberosOperationException e) {
        throw new IllegalArgumentException(e.getMessage(), e);
      }
    }

    List<Stage> stages = requestStages.getStages();
    LOG.debug("Created {} stages", ((stages != null) ? stages.size() : 0));

  } else {
    LOG.debug("Created 0 stages");
  }

  return requestStages;
}

/**
 * Returns true if any host already has this service component in the
 * INSTALLED or STARTED state (used to decide whether Kerberos configs need resetting).
 */
private boolean hostComponentAlreadyExists(Cluster cluster, ServiceComponentHost sch) throws AmbariException {
  Service service = cluster.getService(sch.getServiceName());
  if (service != null) {
    ServiceComponent serviceComponent = service.getServiceComponent(sch.getServiceComponentName());
    if (serviceComponent != null) {
      Map<String, ServiceComponentHost> serviceComponentHostMap = serviceComponent.getServiceComponentHosts();
      for (ServiceComponentHost serviceComponentHost : serviceComponentHostMap.values()) {
        if (serviceComponentHost.getState() == State.INSTALLED || serviceComponentHost.getState() == State.STARTED) {
          return true;
        }
      }
    }
  }
  return false;
}

/**
 * Builds a fully populated {@link ExecutionCommand} for a single host component
 * and role command by creating a throwaway stage, running createHostAction on it,
 * and then enriching the resulting command with host params, cluster host info,
 * local components and config attributes.
 */
@Override
public ExecutionCommand getExecutionCommand(Cluster cluster,
                                            ServiceComponentHost scHost,
                                            RoleCommand roleCommand) throws AmbariException {
  Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
  String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
  Map<String, String> hostParamsCmd = customCommandExecutionHelper.createDefaultHostParams(cluster);
  // Throwaway stage (ids 0/1, empty context) used only to harvest the execution command
  Stage stage = createNewStage(0, cluster,
      1, "",
      clusterHostInfoJson,
      "{}", "");

  Map<String, Map<String, String>> configTags = configHelper.getEffectiveDesiredTags(cluster, scHost.getHostName());
  Map<String, Map<String, String>> configurations = configHelper.getEffectiveConfigProperties(cluster, configTags);

  Map<String, Map<String, Map<String, String>>> configurationAttributes = new TreeMap<String, Map<String, Map<String, String>>>();
  createHostAction(cluster, stage, scHost, configurations, configurationAttributes,
      configTags, roleCommand, null, null);
  ExecutionCommand ec = stage.getExecutionCommands().get(scHost.getHostName()).get(0).getExecutionCommand();

  // createHostAction does not take a hostLevelParams but creates one
  // Merge defaults with the created params; the created params win on conflicts
  hostParamsCmd.putAll(ec.getHostLevelParams());
  ec.getHostLevelParams().putAll(hostParamsCmd);

  ec.setClusterHostInfo(
      StageUtils.getClusterHostInfo(cluster));

  // NOTE(review): cluster has already been dereferenced above, so this null check
  // can never be false at this point — likely dead code
  if (null != cluster) {
    // Generate localComponents
    for (ServiceComponentHost sch : cluster.getServiceComponentHosts(scHost.getHostName())) {
      ec.getLocalComponents().add(sch.getServiceComponentName());
    }
  }

  // Hack - Remove passwords from configs
  if ((ec.getRole().equals(Role.HIVE_CLIENT.toString()) ||
      ec.getRole().equals(Role.WEBHCAT_SERVER.toString()) ||
      ec.getRole().equals(Role.HCAT.toString())) &&
      ec.getConfigurations().containsKey(Configuration.HIVE_CONFIG_TAG)) {
    ec.getConfigurations().get(Configuration.HIVE_CONFIG_TAG).remove(Configuration.HIVE_METASTORE_PASSWORD_PROPERTY);
  }

  // Add attributes
  Map<String, Map<String, Map<String, String>>> configAttributes =
      configHelper.getEffectiveConfigAttributes(cluster,
          ec.getConfigurationTags());

  for (Map.Entry<String, Map<String, Map<String, String>>> attributesOccurrence : configAttributes.entrySet()) {
    String type = attributesOccurrence.getKey();
    Map<String, Map<String, String>> attributes = attributesOccurrence.getValue();

    if (ec.getConfigurationAttributes() != null) {
      if (!ec.getConfigurationAttributes().containsKey(type)) {
        ec.getConfigurationAttributes().put(type, new TreeMap<String, Map<String, String>>());
      }
      configHelper.cloneAttributesMap(attributes, ec.getConfigurationAttributes().get(type));
    }
  }

  return ec;
}

/**
 * Resolves stack configuration dependencies for each request and stamps each
 * response with the requesting stack/service/property so the caller can
 * correlate results.
 */
@Override
public Set<StackConfigurationDependencyResponse> getStackConfigurationDependencies(
    Set<StackConfigurationDependencyRequest> requests) throws AmbariException {
  Set<StackConfigurationDependencyResponse> response =
      new HashSet<StackConfigurationDependencyResponse>();
  if (requests != null) {
    for (StackConfigurationDependencyRequest request : requests) {

      String stackName = request.getStackName();
      String stackVersion = request.getStackVersion();
      String serviceName = request.getServiceName();
      String propertyName = request.getPropertyName();

      Set<StackConfigurationDependencyResponse> stackConfigurations =
          getStackConfigurationDependencies(request);

      // Stamp each dependency response with the identifying fields of the request
      for (StackConfigurationDependencyResponse dependencyResponse : stackConfigurations) {
        dependencyResponse.setStackName(stackName);
        dependencyResponse.setStackVersion(stackVersion);
        dependencyResponse.setServiceName(serviceName);
        dependencyResponse.setPropertyName(propertyName);
      }

      response.addAll(stackConfigurations);
    }
  }

  return response;
}

/**
 * Looks up the "depended by" properties of the requested stack property,
 * optionally filtered to a single dependency name (null = all dependencies).
 */
private Set<StackConfigurationDependencyResponse> getStackConfigurationDependencies(StackConfigurationDependencyRequest request) throws AmbariException {
  Set<StackConfigurationDependencyResponse> response =
      new HashSet<StackConfigurationDependencyResponse>();

  String stackName = request.getStackName();
  String stackVersion = request.getStackVersion();
  String serviceName = request.getServiceName();
  String propertyName = request.getPropertyName();
  String dependencyName = request.getDependencyName();

  Set<PropertyInfo> properties = ambariMetaInfo.getPropertiesByName(stackName, stackVersion, serviceName, propertyName);

  for (PropertyInfo property: properties) {
    for (PropertyDependencyInfo dependency: property.getDependedByProperties()) {
      if (dependencyName == null || dependency.getName().equals(dependencyName)) {
        response.add(dependency.convertToResponse());
      }
    }
  }

  return response;
}

/**
 * Persists the desired-state changes for services, components and host
 * components in one transaction. Client-only services are never marked
 * STARTED; ignored host components have their desired state reset to their
 * current state.
 */
@Transactional
void updateServiceStates(
    Cluster cluster,
    Map<State, List<Service>> changedServices,
    Map<State, List<ServiceComponent>> changedComps,
    Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts,
    Collection<ServiceComponentHost> ignoredScHosts
) {
  if (changedServices != null) {
    for (Entry<State, List<Service>> entry : changedServices.entrySet()) {
      State newState = entry.getKey();
      for (Service s : entry.getValue()) {
        // Client-only services have nothing to "start" — skip STARTED desired state
        if (s.isClientOnlyService()
            && newState == State.STARTED) {
          continue;
        }
        s.setDesiredState(newState);
      }
    }
  }

  if (changedComps != null) {
    for (Entry<State, List<ServiceComponent>> entry : changedComps.entrySet()) {
      State newState = entry.getKey();
      for (ServiceComponent sc : entry.getValue()) {
        sc.setDesiredState(newState);
      }
    }
  }

  for (Map<State, List<ServiceComponentHost>> stateScHostMap : changedScHosts.values()) {
    for (Entry<State, List<ServiceComponentHost>> entry : stateScHostMap.entrySet()) {
      State newState = entry.getKey();
      for (ServiceComponentHost sch : entry.getValue()) {
        sch.setDesiredState(newState);
      }
    }
  }

  if (ignoredScHosts != null) {
    for (ServiceComponentHost scHost : ignoredScHosts) {
      // Re-sync desired state to the actual state for explicitly ignored components
      scHost.setDesiredState(scHost.getState());
    }
  }
}

/**
 * Convenience wrapper: builds the stages via {@link #addStages}, persists the
 * resulting request and returns its status response.
 */
@Override
public RequestStatusResponse createAndPersistStages(Cluster cluster, Map<String, String> requestProperties,
                                            Map<String, String> requestParameters,
                                            Map<State, List<Service>> changedServices,
                                            Map<State, List<ServiceComponent>> changedComponents,
                                            Map<String, Map<State, List<ServiceComponentHost>>> changedHosts,
                                            Collection<ServiceComponentHost> ignoredHosts,
                                            boolean runSmokeTest, boolean reconfigureClients) throws AmbariException {

  RequestStageContainer request = addStages(null, cluster, requestProperties, requestParameters, changedServices,
      changedComponents, changedHosts, ignoredHosts, runSmokeTest, reconfigureClients);

  request.persist();
  return request.getRequestStatusResponse();
}

/**
 * Creates stages for the requested state changes (allocating a new request
 * container when {@code requestStages} is null) and then updates the desired
 * states under the cluster's global write lock.
 */
@Override
public RequestStageContainer addStages(RequestStageContainer requestStages, Cluster cluster, Map<String, String> requestProperties,
                             Map<String, String> requestParameters, Map<State, List<Service>> changedServices,
                             Map<State, List<ServiceComponent>> changedComponents,
                             Map<String, Map<State, List<ServiceComponentHost>>> changedHosts,
                             Collection<ServiceComponentHost> ignoredHosts, boolean runSmokeTest,
                             boolean
                             reconfigureClients) throws AmbariException {

  if (requestStages == null) {
    requestStages = new RequestStageContainer(actionManager.getNextRequestId(), null, requestFactory, actionManager);
  }

  requestStages = doStageCreation(requestStages, cluster, changedServices, changedComponents,
      changedHosts, requestParameters, requestProperties, runSmokeTest, reconfigureClients);

  // Desired-state updates must not race with concurrent cluster mutations
  Lock clusterWriteLock = cluster.getClusterGlobalLock().writeLock();
  clusterWriteLock.lock();
  try {
    updateServiceStates(cluster, changedServices, changedComponents, changedHosts, ignoredHosts);
  } finally {
    clusterWriteLock.unlock();
  }
  return requestStages;
}

/**
 * Validates that a host-component request carries a cluster name, component
 * name and host name, and that it does not attempt to set adminState
 * (which must go through the DECOMMISSION action instead).
 *
 * @throws IllegalArgumentException if any required field is missing or
 *         adminState is present
 */
//todo: for now made this public since is is still used by createHostComponents
//todo: delete after all host component logic is in HostComponentResourceProvider
public void validateServiceComponentHostRequest(ServiceComponentHostRequest request) {
  if (request.getClusterName() == null
      || request.getClusterName().isEmpty()
      || request.getComponentName() == null
      || request.getComponentName().isEmpty()
      || request.getHostname() == null
      || request.getHostname().isEmpty()) {
    throw new IllegalArgumentException("Invalid arguments"
        + ", cluster name, component name and host name should be"
        + " provided");
  }

  if (request.getAdminState() != null) {
    throw new IllegalArgumentException("Property adminState cannot be modified through update. Use service " +
        "specific DECOMMISSION action to decommision/recommission components.");
  }
}

/**
 * Resolves the service that owns the given component in the cluster's
 * desired stack.
 *
 * @throws AmbariException if no service declares the component
 */
@Override
public String findServiceName(Cluster cluster, String componentName) throws AmbariException {
  StackId stackId = cluster.getDesiredStackVersion();
  String serviceName =
      ambariMetaInfo.getComponentToService(stackId.getStackName(),
          stackId.getStackVersion(), componentName);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Looking up service name for component"
        + ", componentName=" + componentName
        + ", serviceName=" + serviceName);
  }

  if (serviceName == null
      || serviceName.isEmpty()) {
    throw new AmbariException("Could not find service for component"
        + ", componentName=" + componentName
        + ", clusterName=" + cluster.getClusterName()
        + ", stackInfo=" + stackId.getStackId());
  }
  return serviceName;
}

/**
 * Updates the users specified.
 *
 * @param requests the users to modify
 *
 * @throws AmbariException if the resources cannot be updated
 * @throws IllegalArgumentException if the authenticated user is not authorized to update all of
 * the requested properties
 */
@Override
public synchronized void updateUsers(Set<UserRequest> requests) throws AmbariException, AuthorizationException {
  boolean isUserAdministrator = AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null,
      RoleAuthorization.AMBARI_MANAGE_USERS);
  String authenticatedUsername = AuthorizationHelper.getAuthenticatedName();

  for (UserRequest request : requests) {
    String requestedUsername = request.getUsername();

    // An administrator can modify any user, else a user can only modify themself.
    if (!isUserAdministrator && (!authenticatedUsername.equalsIgnoreCase(requestedUsername))) {
      throw new AuthorizationException();
    }

    User u = users.getAnyUser(requestedUsername);
    if (null == u) {
      // Unknown users are silently skipped rather than reported as an error
      continue;
    }

    if (null != request.isActive()) {
      // If this value is being set, make sure the authenticated user is an administrator before
      // allowing to change it. Only administrators should be able to change a user's active state
      if (!isUserAdministrator) {
        throw new AuthorizationException("The authenticated user is not authorized to update the requested resource property");
      }

      users.setUserActive(u.getUserName(), request.isActive());
    }

    if (null != request.isAdmin()) {
      // If this value is being set, make sure the authenticated user is an administrator before
      // allowing to change it. Only administrators should be able to change a user's administrative
      // privileges
      if (!isUserAdministrator) {
        throw new AuthorizationException("The authenticated user is not authorized to update the requested resource property");
      }

      if (request.isAdmin()) {
        users.grantAdminPrivilege(u.getUserId());
      } else {
        users.revokeAdminPrivilege(u.getUserId());
      }
    }

    // Password change requires both the old and the new password
    if (null != request.getOldPassword() && null != request.getPassword()) {
      users.modifyPassword(u.getUserName(), request.getOldPassword(),
          request.getPassword());
    }
  }
}

/**
 * Deletes an entire cluster by name.
 *
 * @throws AmbariException if the cluster name is missing
 */
@Override
public synchronized void deleteCluster(ClusterRequest request)
    throws AmbariException {

  if (request.getClusterName() == null
      || request.getClusterName().isEmpty()) {
    // FIXME throw correct error
    throw new AmbariException("Invalid arguments");
  }
  LOG.info("Received a delete cluster request"
      + ", clusterName=" + request.getClusterName());
  if (request.getHostNames() != null) {
    // FIXME treat this as removing a host from a cluster?
  } else {
    // deleting whole cluster
    clusters.deleteCluster(request.getClusterName());
  }
}

/**
 * Deletes host components. Host-wide requests (no component name) are expanded
 * to one request per component on that host. Masters/slaves must be in a
 * removable state; clients can always be removed. Deleted decommissioned
 * slaves additionally trigger an exclude-file cleanup command and mark the
 * owning master for restart.
 *
 * @return always null (no request/stage is returned to the caller)
 */
@Override
public RequestStatusResponse deleteHostComponents(
    Set<ServiceComponentHostRequest> requests) throws AmbariException, AuthorizationException {

  Set<ServiceComponentHostRequest> expanded = new HashSet<ServiceComponentHostRequest>();

  // if any request are for the whole host, they need to be expanded
  for (ServiceComponentHostRequest request : requests) {
    if (null == request.getComponentName()) {
      if (null == request.getClusterName() || request.getClusterName().isEmpty() ||
          null == request.getHostname() || request.getHostname().isEmpty()) {
        throw new IllegalArgumentException("Cluster name and hostname must be specified.");
      }
      Cluster cluster = clusters.getCluster(request.getClusterName());

      if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(),
          EnumSet.of(RoleAuthorization.SERVICE_ADD_DELETE_SERVICES,RoleAuthorization.HOST_ADD_DELETE_COMPONENTS))) {
        throw new AuthorizationException("The authenticated user is not authorized to delete service components from hosts");
      }

      // One delete request per component currently present on the host
      for (ServiceComponentHost sch : cluster.getServiceComponentHosts(request.getHostname())) {
        ServiceComponentHostRequest schr = new ServiceComponentHostRequest(request.getClusterName(),
            sch.getServiceName(), sch.getServiceComponentName(), sch.getHostName(), null);
        expanded.add(schr);
      }
    }
    else {
      expanded.add(request);
    }
  }

  Map<ServiceComponent, Set<ServiceComponentHost>> safeToRemoveSCHs = new HashMap<ServiceComponent, Set<ServiceComponentHost>>();

  // First pass: validate everything before deleting anything
  for (ServiceComponentHostRequest request : expanded) {

    validateServiceComponentHostRequest(request);

    Cluster cluster = clusters.getCluster(request.getClusterName());

    if (StringUtils.isEmpty(request.getServiceName())) {
      request.setServiceName(findServiceName(cluster, request.getComponentName()));
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("Received a hostComponent DELETE request"
          + ", clusterName=" + request.getClusterName()
          + ", serviceName=" + request.getServiceName()
          + ", componentName=" + request.getComponentName()
          + ", hostname=" + request.getHostname()
          + ", request=" + request);
    }

    Service service = cluster.getService(request.getServiceName());
    ServiceComponent component = service.getServiceComponent(request.getComponentName());
    ServiceComponentHost componentHost = component.getServiceComponentHost(request.getHostname());

    if (!componentHost.canBeRemoved()) {
      throw new AmbariException("Host Component cannot be removed"
          + ", clusterName=" + request.getClusterName()
          + ", serviceName=" + request.getServiceName()
          + ", componentName=" + request.getComponentName()
          + ", hostname=" + request.getHostname()
          + ", request=" + request);
    }

    // Only allow removing master/slave components in DISABLED/UNKNOWN/INSTALL_FAILED/INIT state without stages
    // generation.
    // Clients may be removed without a state check.
    if (!component.isClientComponent() &&
        !componentHost.getState().isRemovableState()) {
      throw new AmbariException("To remove master or slave components they must be in " +
          "DISABLED/INIT/INSTALLED/INSTALL_FAILED/UNKNOWN state. Current=" + componentHost.getState() + ".");
    }

    setRestartRequiredServices(service, request.getComponentName());

    if (!safeToRemoveSCHs.containsKey(component)) {
      safeToRemoveSCHs.put(component, new HashSet<ServiceComponentHost>());
    }
    safeToRemoveSCHs.get(component).add(componentHost);
  }

  // Second pass: perform the deletions and the decommission-related follow-up work
  for (Entry<ServiceComponent, Set<ServiceComponentHost>> entry : safeToRemoveSCHs.entrySet()) {
    for (ServiceComponentHost componentHost : entry.getValue()) {
      String included_hostname = componentHost.getHostName();
      String serviceName = entry.getKey().getServiceName();
      String master_component_name = null;
      String slave_component_name = componentHost.getServiceComponentName();
      HostComponentAdminState desiredAdminState = componentHost.getComponentAdminState();
      State slaveState = componentHost.getState();
      //Delete hostcomponents
      entry.getKey().deleteServiceComponentHosts(componentHost.getHostName());

      // If deleted hostcomponents support decommission and were decommissioned and stopped
      if (AmbariCustomCommandExecutionHelper.masterToSlaveMappingForDecom.containsValue(slave_component_name)
          && desiredAdminState.equals(HostComponentAdminState.DECOMMISSIONED)
          && slaveState.equals(State.INSTALLED)) {

        // Resolve the master component that owns this decommissionable slave.
        // NOTE(review): master_component_name stays null if no mapping entry matches;
        // the containsValue() guard above should prevent that, but the later
        // getServiceComponent(master_component_name) call would NPE if it did — verify.
        for (Entry<String, String> entrySet : AmbariCustomCommandExecutionHelper.masterToSlaveMappingForDecom.entrySet()) {
          if (entrySet.getValue().equals(slave_component_name)) {
            master_component_name = entrySet.getKey();
          }
        }
        //Clear exclude file or draining list except HBASE
        if (!serviceName.equals(Service.Type.HBASE.toString())) {
          HashMap<String, String> requestProperties = new HashMap<String, String>();
          requestProperties.put("context", "Remove host " +
              included_hostname + " from exclude file");
          requestProperties.put("exclusive", "true");
          HashMap<String, String> params = new HashMap<String, String>();
          params.put("included_hosts", included_hostname);
          params.put("slave_type", slave_component_name);
          params.put(AmbariCustomCommandExecutionHelper.UPDATE_EXCLUDE_FILE_ONLY, "true");

          //Create filter for RECOMMISSION command
          RequestResourceFilter resourceFilter
              = new RequestResourceFilter(serviceName, master_component_name, null);
          //Create request for RECOMMISSION command
          ExecuteActionRequest actionRequest = new ExecuteActionRequest(
              entry.getKey().getClusterName(), AmbariCustomCommandExecutionHelper.DECOMMISSION_COMMAND_NAME,
              null, Collections.singletonList(resourceFilter), null, params, true);
          //Send request
          createAction(actionRequest, requestProperties);
        }

        //Mark master component as needed to restart for remove host info from components UI
        Cluster cluster = clusters.getCluster(entry.getKey().getClusterName());
        Service service = cluster.getService(serviceName);
        ServiceComponent sc = service.getServiceComponent(master_component_name);

        if (sc != null && sc.isMasterComponent()) {
          for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
            sch.setRestartRequired(true);
          }
        }
      }
    }
  }

  // set restartRequired flag for monitoring services
  if (!safeToRemoveSCHs.isEmpty()) {
    setMonitoringServicesRestartRequired(requests);
  }

  return null;
}

/**
 * Deletes each requested user; unknown usernames are silently skipped.
 */
@Override
public void deleteUsers(Set<UserRequest> requests)
    throws AmbariException {

  for (UserRequest r : requests) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Received a delete user request"
          + ", username=" + r.getUsername());
    }
    User u = users.getAnyUser(r.getUsername());
    if (null != u) {
      users.removeUser(u);
    }
  }
}

/**
 * Deletes each requested group; unknown group names are silently skipped.
 */
@Override
public void deleteGroups(Set<GroupRequest> requests) throws AmbariException {
  for (GroupRequest request: requests) {
    LOG.debug("Received a delete group request, groupname=" + request.getGroupName());
    final Group group = users.getGroup(request.getGroupName());
    if (group != null) {
      users.removeGroup(group);
    }
  }
}

/**
 * Removes the requested user/group memberships.
 */
@Override
public void deleteMembers(java.util.Set<MemberRequest> requests) throws AmbariException {
  for (MemberRequest request : requests) {
    LOG.debug("Received a delete member request, " + request);
    users.removeMemberFromGroup(request.getGroupName(), request.getUserName());
  }
}

/**
 * Get a request response for the given request ids.  Note that this method
 * fully populates a request resource including the set of task sub-resources
 * in the request response.
 */
RequestStatusResponse getRequestStatusResponse(long requestId) {
  RequestStatusResponse response = new RequestStatusResponse(requestId);
  List<HostRoleCommand> hostRoleCommands =
      actionManager.getRequestTasks(requestId);

  response.setRequestContext(actionManager.getRequestContext(requestId));
  List<ShortTaskStatus> tasks = new ArrayList<ShortTaskStatus>();

  for (HostRoleCommand hostRoleCommand : hostRoleCommands) {
    tasks.add(new ShortTaskStatus(hostRoleCommand));
  }
  response.setTasks(tasks);

  return response;
}

/**
 * Gathers cluster responses for all requests. A ClusterNotFoundException is
 * only propagated when there is a single request; with multiple (OR-predicate)
 * requests it is swallowed so the remaining matches are returned.
 */
@Override
public Set<ClusterResponse> getClusters(Set<ClusterRequest> requests) throws AmbariException, AuthorizationException {
  Set<ClusterResponse> response = new HashSet<ClusterResponse>();
  for (ClusterRequest request : requests) {
    try {
      response.addAll(getClusters(request));
    } catch (ClusterNotFoundException e) {
      if (requests.size() == 1) {
        // only throw exception if 1 request.
        // there will be > 1 request in case of OR predicate
        throw e;
      }
    }
  }
  return response;
}

/**
 * Gathers host-component responses for all requests. Not-found exceptions are
 * only propagated for single requests; with multiple (OR-predicate) requests
 * they are logged and skipped so partial matches are still returned.
 */
@Override
public Set<ServiceComponentHostResponse> getHostComponents(
    Set<ServiceComponentHostRequest> requests) throws AmbariException {
  LOG.debug("Processing requests: {}", requests);
  Set<ServiceComponentHostResponse> response =
      new HashSet<ServiceComponentHostResponse>();
  for (ServiceComponentHostRequest request : requests) {
    try {
      response.addAll(getHostComponents(request));
    } catch (ServiceComponentHostNotFoundException e) {
      if (requests.size() == 1) {
        // only throw exception if 1 request.
        // there will be > 1 request in case of OR predicate
        throw e;
      } else {
        LOG.debug("Ignoring not found exception due to other requests", e);
      }
    } catch (ServiceNotFoundException e) {
      if (requests.size() == 1) {
        // only throw exception if 1 request.
// there will be > 1 request in case of OR predicate // In 'OR' case, a host_component may be included in predicate // that has no corresponding service throw e; } else { LOG.debug("Ignoring not found exception due to other requests", e); } } catch (ServiceComponentNotFoundException e) { if (requests.size() == 1) { // only throw exception if 1 request. // there will be > 1 request in case of OR predicate // In 'OR' case, a host_component may be included in predicate // that has no corresponding component throw e; } else { LOG.debug("Ignoring not found exception due to other requests", e); } } catch (ParentObjectNotFoundException e) { // If there is only one request, always throw exception. // There will be > 1 request in case of OR predicate. // For HostNotFoundException, only throw exception if host_name is // provided in URL. If host_name is part of query, don't throw exception. boolean throwException = true; if (requests.size() > 1 && HostNotFoundException.class.isInstance(e.getCause())) { for (ServiceComponentHostRequest r : requests) { if (r.getHostname() == null) { // host_name provided in query since all requests don't have host_name set throwException = false; LOG.debug("HostNotFoundException ignored", e); break; } } } if (throwException) { throw e; } } } return response; } @Override public Set<ConfigurationResponse> getConfigurations( Set<ConfigurationRequest> requests) throws AmbariException { Set<ConfigurationResponse> response = new HashSet<ConfigurationResponse>(); for (ConfigurationRequest request : requests) { response.addAll(getConfigurations(request)); } return response; } @Override public Set<ServiceConfigVersionResponse> getServiceConfigVersions(Set<ServiceConfigVersionRequest> requests) throws AmbariException { Set<ServiceConfigVersionResponse> responses = new LinkedHashSet<ServiceConfigVersionResponse>(); for (ServiceConfigVersionRequest request : requests) { responses.addAll(getServiceConfigVersions(request)); } return responses; } private 
 Set<ServiceConfigVersionResponse> getServiceConfigVersions(ServiceConfigVersionRequest request) throws AmbariException { if (request.getClusterName() == null) { throw new IllegalArgumentException("Invalid arguments, cluster name" + " should not be null"); } Cluster cluster = clusters.getCluster(request.getClusterName()); Set<ServiceConfigVersionResponse> result = new LinkedHashSet<ServiceConfigVersionResponse>(); for (ServiceConfigVersionResponse response : cluster.getServiceConfigVersions()) { if (request.getServiceName() != null && !StringUtils.equals(request.getServiceName(), response.getServiceName())) { continue; } if (request.getVersion() != null && NumberUtils.compare(request.getVersion(), response.getVersion()) != 0) { continue; } if (request.getUserName() != null && !StringUtils.equals(request.getUserName(), response.getUserName())) { continue; } result.add(response); } return result; } @Override public Set<UserResponse> getUsers(Set<UserRequest> requests) throws AmbariException, AuthorizationException { Set<UserResponse> responses = new HashSet<UserResponse>(); for (UserRequest r : requests) { if (LOG.isDebugEnabled()) { LOG.debug("Received a getUsers request" + ", userRequest=" + r.toString()); } String requestedUsername = r.getUsername(); String authenticatedUsername = AuthorizationHelper.getAuthenticatedName(); // A user resource may be retrieved by an administrator or the same user.
      // Callers without AMBARI_MANAGE_USERS authorization are restricted below to
      // reading only their own user resource.
if(!AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null, RoleAuthorization.AMBARI_MANAGE_USERS)) { if (null == requestedUsername) { // Since the authenticated user is not the administrator, force only that user's resource // to be returned requestedUsername = authenticatedUsername; } else if (!requestedUsername.equalsIgnoreCase(authenticatedUsername)) { // Since the authenticated user is not the administrator and is asking for a different user, // throw an AuthorizationException throw new AuthorizationException(); } } // get them all if (null == requestedUsername) { for (User u : users.getAllUsers()) { UserResponse resp = new UserResponse(u.getUserName(), u.getUserType(), u.isLdapUser(), u.isActive(), u .isAdmin()); resp.setGroups(new HashSet<String>(u.getGroups())); responses.add(resp); } } else { User u = users.getAnyUser(requestedUsername); if (null == u) { if (requests.size() == 1) { // only throw exceptin if there is a single request // if there are multiple requests, this indicates an OR predicate throw new ObjectNotFoundException("Cannot find user '" + requestedUsername + "'"); } } else { UserResponse resp = new UserResponse(u.getUserName(), u.getUserType(), u.isLdapUser(), u.isActive(), u .isAdmin()); resp.setGroups(new HashSet<String>(u.getGroups())); responses.add(resp); } } } return responses; } @Override public Set<GroupResponse> getGroups(Set<GroupRequest> requests) throws AmbariException { final Set<GroupResponse> responses = new HashSet<GroupResponse>(); for (GroupRequest request: requests) { LOG.debug("Received a getGroups request, groupRequest=" + request.toString()); // get them all if (null == request.getGroupName()) { for (Group group: users.getAllGroups()) { final GroupResponse response = new GroupResponse(group.getGroupName(), group.isLdapGroup()); responses.add(response); } } else { final Group group = users.getGroup(request.getGroupName()); if (null == group) { if (requests.size() == 1) { // only throw exception if there is a single
request // if there are multiple requests, this indicates an OR predicate throw new ObjectNotFoundException("Cannot find group '" + request.getGroupName() + "'"); } } else { final GroupResponse response = new GroupResponse(group.getGroupName(), group.isLdapGroup()); responses.add(response); } } } return responses; } @Override public void updateGroups(Set<GroupRequest> requests) throws AmbariException { // currently no group updates are supported } protected String getClientHostForRunningAction(Cluster cluster, Service service, ServiceComponent serviceComponent) throws AmbariException { if (serviceComponent != null && !serviceComponent.getServiceComponentHosts().isEmpty()) { Set<String> candidateHosts = serviceComponent.getServiceComponentHosts().keySet(); filterHostsForAction(candidateHosts, service, cluster, Resource.Type.Cluster); return getHealthyHost(candidateHosts); } return null; } protected ServiceComponent getClientComponentForRunningAction(Cluster cluster, Service service) throws AmbariException { /* * We assume Cluster level here. That means that we never run service * checks on clients/hosts that are in maintenance state.
 * That also means that we can not run service check if the only host * that has client component is in maintenance state */ StackId stackId = service.getDesiredStackVersion(); ComponentInfo compInfo = ambariMetaInfo.getService(stackId.getStackName(), stackId.getStackVersion(), service.getName()).getClientComponent(); if (compInfo != null) { try { ServiceComponent serviceComponent = service.getServiceComponent(compInfo.getName()); if (!serviceComponent.getServiceComponentHosts().isEmpty()) { return serviceComponent; } } catch (ServiceComponentNotFoundException e) { LOG.warn("Could not find required component to run action" + ", clusterName=" + cluster.getClusterName() + ", serviceName=" + service.getName() + ", componentName=" + compInfo.getName()); } } // any component will do Map<String, ServiceComponent> components = service.getServiceComponents(); if (!components.isEmpty()) { for (ServiceComponent serviceComponent : components.values()) { if (!serviceComponent.getServiceComponentHosts().isEmpty()) { return serviceComponent; } } } return null; } /** * Utility method that filters out hosts from set based on their maintenance * state status. */ protected void filterHostsForAction(Set<String> candidateHosts, Service service, final Cluster cluster, final Resource.Type level) throws AmbariException { Set<String> ignoredHosts = maintenanceStateHelper.filterHostsInMaintenanceState( candidateHosts, new MaintenanceStateHelper.HostPredicate() { @Override public boolean shouldHostBeRemoved(final String hostname) throws AmbariException { Host host = clusters.getHost(hostname); return !maintenanceStateHelper.isOperationAllowed( host, cluster.getClusterId(), level); } } ); LOG.debug("Ignoring hosts when selecting available hosts for action" + " due to maintenance state."
+ "Ignored hosts =" + ignoredHosts + ", cluster=" + cluster.getClusterName() + ", service=" + service.getName()); } @Override public String getHealthyHost(Set<String> hostList) throws AmbariException { String hostName = null; for (String candidateHostName : hostList) { hostName = candidateHostName; Host candidateHost = clusters.getHost(hostName); if (candidateHost.getState() == HostState.HEALTHY) { break; } } return hostName; } @Override public RequestStatusResponse createAction(ExecuteActionRequest actionRequest, Map<String, String> requestProperties) throws AmbariException { String clusterName = actionRequest.getClusterName(); String requestContext = ""; if (requestProperties != null) { requestContext = requestProperties.get(REQUEST_CONTEXT_PROPERTY); if (requestContext == null) { // guice needs a non-null value as there is no way to mark this parameter @Nullable requestContext = ""; } } Cluster cluster = null; if (null != clusterName) { cluster = clusters.getCluster(clusterName); LOG.info("Received action execution request" + ", clusterName=" + actionRequest.getClusterName() + ", request=" + actionRequest.toString()); } ActionExecutionContext actionExecContext = getActionExecutionContext(actionRequest); if (actionRequest.isCommand()) { customCommandExecutionHelper.validateAction(actionRequest); } else { actionExecutionHelper.validateAction(actionRequest); } // TODO Alejandro, Called First. insert params.version. Called during Rebalance HDFS, ZOOKEEPER Restart, Zookeeper Service Check.
    // Allocate the request id up front so every stage built below is attached to the
    // same RequestStageContainer.
long requestId = actionManager.getNextRequestId(); RequestStageContainer requestStageContainer = new RequestStageContainer( requestId, null, requestFactory, actionManager, actionRequest); StackId stackId = null; if (null != cluster) { stackId = cluster.getDesiredStackVersion(); } ExecuteCommandJson jsons = customCommandExecutionHelper.getCommandJson(actionExecContext, cluster, stackId); String commandParamsForStage = jsons.getCommandParamsForStage(); Map<String, String> commandParamsStage = gson.fromJson(commandParamsForStage, new TypeToken<Map<String, String>>() {}.getType()); // Ensure that the specified requestContext (if any) is set as the request context if (!requestContext.isEmpty()) { requestStageContainer.setRequestContext(requestContext); } // replace password references in requestProperties SecretReference.replaceReferencesWithPasswords(commandParamsStage, cluster); // If the request is to perform the Kerberos service check, set up the stages to // ensure that the (cluster-level) smoke user principal and keytab is available on all hosts boolean kerberosServiceCheck = Role.KERBEROS_SERVICE_CHECK.name().equals(actionRequest.getCommandName()); if (kerberosServiceCheck) { // Parse the command parameters into a map so that additional values may be added to it try { requestStageContainer = kerberosHelper.createTestIdentity(cluster, commandParamsStage, requestStageContainer); } catch (KerberosOperationException e) { throw new IllegalArgumentException(e.getMessage(), e); } } commandParamsForStage = gson.toJson(commandParamsStage); Stage stage = createNewStage(requestStageContainer.getLastStageId(), cluster, requestId, requestContext, jsons.getClusterHostInfo(), commandParamsForStage, jsons.getHostParamsForStage()); if (actionRequest.isCommand()) { customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, requestProperties); } else { actionExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage); } RoleGraph rg; if (null !=
 cluster) { RoleCommandOrder rco = getRoleCommandOrder(cluster); rg = roleGraphFactory.createNew(rco); } else { rg = roleGraphFactory.createNew(); } rg.build(stage); List<Stage> stages = rg.getStages(); if (stages != null && !stages.isEmpty()) { requestStageContainer.addStages(stages); } // If the request is to perform the Kerberos service check, delete the test-specific principal // and keytab that was created for this service check if (kerberosServiceCheck) { // Parse the command parameters into a map so that existing values may be accessed and // additional values may be added to it. commandParamsStage = gson.fromJson(commandParamsForStage, new TypeToken<Map<String, String>>() { }.getType()); try { requestStageContainer = kerberosHelper.deleteTestIdentity(cluster, commandParamsStage, requestStageContainer); } catch (KerberosOperationException e) { throw new IllegalArgumentException(e.getMessage(), e); } } requestStageContainer.persist(); return requestStageContainer.getRequestStatusResponse(); } @Override public Set<StackResponse> getStacks(Set<StackRequest> requests) throws AmbariException { Set<StackResponse> response = new HashSet<StackResponse>(); for (StackRequest request : requests) { try { response.addAll(getStacks(request)); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request.
// there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<StackResponse> getStacks(StackRequest request) throws AmbariException { Set<StackResponse> response; String stackName = request.getStackName(); if (stackName != null) { // this will throw an exception if the stack doesn't exist ambariMetaInfo.getStacks(stackName); response = Collections.singleton(new StackResponse(stackName)); } else { Collection<StackInfo> supportedStacks = ambariMetaInfo.getStacks(); response = new HashSet<StackResponse>(); for (StackInfo stack: supportedStacks) { response.add(new StackResponse(stack.getName())); } } return response; } @Override public synchronized RequestStatusResponse updateStacks() throws AmbariException { try { ambariMetaInfo.init(); } catch (AmbariException e) { throw e; } catch (Exception e) { throw new AmbariException( "Ambari Meta Information can't be read from the stack root directory"); } return null; } @Override public Set<RepositoryResponse> getRepositories(Set<RepositoryRequest> requests) throws AmbariException { Set<RepositoryResponse> response = new HashSet<RepositoryResponse>(); for (RepositoryRequest request : requests) { try { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); Set<RepositoryResponse> repositories = getRepositories(request); for (RepositoryResponse repositoryResponse : repositories) { if (repositoryResponse.getStackName() == null) { repositoryResponse.setStackName(stackName); } if (repositoryResponse.getStackVersion() == null) { repositoryResponse.setStackVersion(stackVersion); } repositoryResponse.setClusterVersionId(request.getClusterVersionId()); } response.addAll(repositories); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request.
        // OR-predicate semantics: when several requests are combined, a missing
        // stack is skipped rather than failing the whole query.
// there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<RepositoryResponse> getRepositories(RepositoryRequest request) throws AmbariException { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String osType = request.getOsType(); String repoId = request.getRepoId(); Long repositoryVersionId = request.getRepositoryVersionId(); String versionDefinitionId = request.getVersionDefinitionId(); // !!! when asking for Repository responses for a versionDefinition, it is either for // an established repo version (a Long) OR from the in-memory generated ones (a String) if (null == repositoryVersionId && null != versionDefinitionId) { if (NumberUtils.isDigits(versionDefinitionId)) { repositoryVersionId = Long.valueOf(versionDefinitionId); } } Set<RepositoryResponse> responses = new HashSet<RepositoryResponse>(); if (repositoryVersionId != null) { final RepositoryVersionEntity repositoryVersion = repositoryVersionDAO.findByPK(repositoryVersionId); if (repositoryVersion != null) { for (OperatingSystemEntity operatingSystem: repositoryVersion.getOperatingSystems()) { if (operatingSystem.getOsType().equals(osType)) { for (RepositoryEntity repository: operatingSystem.getRepositories()) { final RepositoryResponse response = new RepositoryResponse(repository.getBaseUrl(), osType, repository.getRepositoryId(), repository.getName(), "", "", ""); if (null != versionDefinitionId) { response.setVersionDefinitionId(versionDefinitionId); } else { response.setRepositoryVersionId(repositoryVersionId); } response.setStackName(repositoryVersion.getStackName()); response.setStackVersion(repositoryVersion.getStackVersion()); responses.add(response); } break; } } } } else if (null != versionDefinitionId) { VersionDefinitionXml xml = ambariMetaInfo.getVersionDefinition(versionDefinitionId); if (null == xml) { throw new AmbariException(String.format("Version identified by %s does not exist", versionDefinitionId));
} StackId stackId = new StackId(xml.release.stackId); for (RepositoryXml.Os os : xml.repositoryInfo.getOses()) { for (RepositoryXml.Repo repo : os.getRepos()) { RepositoryResponse resp = new RepositoryResponse(repo.getBaseUrl(), os.getFamily(), repo.getRepoId(), repo.getRepoName(), repo.getMirrorsList(), repo.getBaseUrl(), repo.getLatestUri()); resp.setVersionDefinitionId(versionDefinitionId); resp.setStackName(stackId.getStackName()); resp.setStackVersion(stackId.getStackVersion()); responses.add(resp); } } } else { if (repoId == null) { List<RepositoryInfo> repositories = ambariMetaInfo.getRepositories(stackName, stackVersion, osType); for (RepositoryInfo repository: repositories) { responses.add(repository.convertToResponse()); } } else { RepositoryInfo repository = ambariMetaInfo.getRepository(stackName, stackVersion, osType, repoId); responses = Collections.singleton(repository.convertToResponse()); } } return responses; } @Override public void updateRepositories(Set<RepositoryRequest> requests) throws AmbariException { for (RepositoryRequest rr : requests) { if (null == rr.getStackName() || rr.getStackName().isEmpty()) { throw new AmbariException("Stack name must be specified."); } if (null == rr.getStackVersion() || rr.getStackVersion().isEmpty()) { throw new AmbariException("Stack version must be specified."); } if (null == rr.getOsType() || rr.getOsType().isEmpty()) { throw new AmbariException("OS type must be specified."); } if (null == rr.getRepoId() || rr.getRepoId().isEmpty()) { throw new AmbariException("Repo ID must be specified."); } if (null != rr.getBaseUrl()) { if (rr.isVerifyBaseUrl()) { verifyRepository(rr); } if (rr.getRepositoryVersionId() != null) { throw new AmbariException("Can't directly update repositories in repository_version, update the repository_version instead"); } ambariMetaInfo.updateRepoBaseURL(rr.getStackName(), rr.getStackVersion(), rr.getOsType(), rr.getRepoId(), rr.getBaseUrl()); } } } @Override public void
 verifyRepositories(Set<RepositoryRequest> requests) throws AmbariException { for (RepositoryRequest request: requests) { if (request.getBaseUrl() == null) { throw new AmbariException("Base url is missing for request " + request); } verifyRepository(request); } } /** * Verifies single repository, see {{@link #verifyRepositories(Set)}. * * @param request request * @throws AmbariException if verification fails */ private void verifyRepository(RepositoryRequest request) throws AmbariException { URLStreamProvider usp = new URLStreamProvider(REPO_URL_CONNECT_TIMEOUT, REPO_URL_READ_TIMEOUT, null, null, null); usp.setSetupTruststoreForHttps(false); RepositoryInfo repositoryInfo = ambariMetaInfo.getRepository(request.getStackName(), request.getStackVersion(), request.getOsType(), request.getRepoId()); String repoName = repositoryInfo.getRepoName(); String errorMessage = null; Exception e = null; String[] suffixes = configs.getRepoValidationSuffixes(request.getOsType()); for (String suffix : suffixes) { String formatted_suffix = String.format(suffix, repoName); String spec = request.getBaseUrl().trim(); // This logic is to identify if the end of baseurl has a slash ('/') and/or the beginning of suffix String (e.g. "/repodata/repomd.xml") // has a slash and they can form a good url. // e.g. "http://baseurl.com/" + "/repodata/repomd.xml" becomes "http://baseurl.com/repodata/repomd.xml" but not "http://baseurl.com//repodata/repomd.xml" if (spec.charAt(spec.length() - 1) != '/' && formatted_suffix.charAt(0) != '/') { spec = spec + "/" + formatted_suffix; } else if (spec.charAt(spec.length() - 1) == '/' && formatted_suffix.charAt(0) == '/') { spec = spec + formatted_suffix.substring(1); } else { spec = spec + formatted_suffix; } // if spec contains "file://" then check local file system.
      // file:// base URLs are validated against the local filesystem; all other
      // schemes are fetched over the network via URLStreamProvider below.
final String FILE_SCHEME = "file://"; if(spec.toLowerCase().startsWith(FILE_SCHEME)){ String filePath = spec.substring(FILE_SCHEME.length()); File f = new File(filePath); if(!f.exists()){ errorMessage = "Could not access base url . " + spec + " . "; e = new FileNotFoundException(errorMessage); break; } }else{ try { IOUtils.readLines(usp.readFrom(spec)); } catch (IOException ioe) { e = ioe; errorMessage = "Could not access base url . " + request.getBaseUrl() + " . "; if (LOG.isDebugEnabled()) { errorMessage += ioe; } else { errorMessage += ioe.getMessage(); } break; } } } if (e != null) { LOG.error(errorMessage); throw new IllegalArgumentException(errorMessage, e); } } @Override public Set<StackVersionResponse> getStackVersions( Set<StackVersionRequest> requests) throws AmbariException { Set<StackVersionResponse> response = new HashSet<StackVersionResponse>(); for (StackVersionRequest request : requests) { String stackName = request.getStackName(); try { Set<StackVersionResponse> stackVersions = getStackVersions(request); for (StackVersionResponse stackVersionResponse : stackVersions) { stackVersionResponse.setStackName(stackName); } response.addAll(stackVersions); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request.
// there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<StackVersionResponse> getStackVersions(StackVersionRequest request) throws AmbariException { Set<StackVersionResponse> response; String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); if (stackVersion != null) { StackInfo stackInfo = ambariMetaInfo.getStack(stackName, stackVersion); response = Collections.singleton(stackInfo.convertToResponse()); } else { try { Collection<StackInfo> stackInfos = ambariMetaInfo.getStacks(stackName); response = new HashSet<StackVersionResponse>(); for (StackInfo stackInfo: stackInfos) { response.add(stackInfo.convertToResponse()); } } catch (StackAccessException e) { response = Collections.emptySet(); } } return response; } @Override public Set<StackServiceResponse> getStackServices( Set<StackServiceRequest> requests) throws AmbariException { Set<StackServiceResponse> response = new HashSet<StackServiceResponse>(); for (StackServiceRequest request : requests) { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); try { Set<StackServiceResponse> stackServices = getStackServices(request); for (StackServiceResponse stackServiceResponse : stackServices) { stackServiceResponse.setStackName(stackName); stackServiceResponse.setStackVersion(stackVersion); } response.addAll(stackServices); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request.
        // Per the OR-predicate convention used throughout this class, a
        // StackAccessException is fatal only for single-request queries.
// there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<StackServiceResponse> getStackServices(StackServiceRequest request) throws AmbariException { Set<StackServiceResponse> response; String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); if (serviceName != null) { ServiceInfo service = ambariMetaInfo.getService(stackName, stackVersion, serviceName); response = Collections.singleton(new StackServiceResponse(service)); } else { Map<String, ServiceInfo> services = ambariMetaInfo.getServices(stackName, stackVersion); response = new HashSet<StackServiceResponse>(); for (ServiceInfo service : services.values()) { response.add(new StackServiceResponse(service)); } } return response; } @Override public Set<StackConfigurationResponse> getStackLevelConfigurations( Set<StackLevelConfigurationRequest> requests) throws AmbariException { Set<StackConfigurationResponse> response = new HashSet<StackConfigurationResponse>(); for (StackLevelConfigurationRequest request : requests) { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); Set<StackConfigurationResponse> stackConfigurations = getStackLevelConfigurations(request); for (StackConfigurationResponse stackConfigurationResponse : stackConfigurations) { stackConfigurationResponse.setStackName(stackName); stackConfigurationResponse.setStackVersion(stackVersion); } response.addAll(stackConfigurations); } return response; } private Set<StackConfigurationResponse> getStackLevelConfigurations( StackLevelConfigurationRequest request) throws AmbariException { Set<StackConfigurationResponse> response = new HashSet<StackConfigurationResponse>(); String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String propertyName = request.getPropertyName(); Set<PropertyInfo> properties; if (propertyName != null) { properties =
 ambariMetaInfo.getStackPropertiesByName(stackName, stackVersion, propertyName); } else { properties = ambariMetaInfo.getStackProperties(stackName, stackVersion); } for (PropertyInfo property: properties) { response.add(property.convertToResponse()); } return response; } @Override public Set<StackConfigurationResponse> getStackConfigurations( Set<StackConfigurationRequest> requests) throws AmbariException { Set<StackConfigurationResponse> response = new HashSet<StackConfigurationResponse>(); for (StackConfigurationRequest request : requests) { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); Set<StackConfigurationResponse> stackConfigurations = getStackConfigurations(request); for (StackConfigurationResponse stackConfigurationResponse : stackConfigurations) { stackConfigurationResponse.setStackName(stackName); stackConfigurationResponse.setStackVersion(stackVersion); stackConfigurationResponse.setServiceName(serviceName); } response.addAll(stackConfigurations); } return response; } private Set<StackConfigurationResponse> getStackConfigurations( StackConfigurationRequest request) throws AmbariException { Set<StackConfigurationResponse> response = new HashSet<StackConfigurationResponse>(); String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); String propertyName = request.getPropertyName(); Set<PropertyInfo> properties; if (propertyName != null) { properties = ambariMetaInfo.getPropertiesByName(stackName, stackVersion, serviceName, propertyName); } else { properties = ambariMetaInfo.getServiceProperties(stackName, stackVersion, serviceName); } for (PropertyInfo property: properties) { response.add(property.convertToResponse()); } return response; } @Override public Set<StackServiceComponentResponse> getStackComponents( Set<StackServiceComponentRequest> requests) throws AmbariException {
// Resolves each stack-component request, back-filling stack/service identifiers on every response.
Set<StackServiceComponentResponse> response = new HashSet<StackServiceComponentResponse>(); for (StackServiceComponentRequest request : requests) { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); try { Set<StackServiceComponentResponse> stackComponents = getStackComponents(request); for (StackServiceComponentResponse stackServiceComponentResponse : stackComponents) { stackServiceComponentResponse.setStackName(stackName); stackServiceComponentResponse.setStackVersion(stackVersion); stackServiceComponentResponse.setServiceName(serviceName); } response.addAll(stackComponents); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request. // there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<StackServiceComponentResponse> getStackComponents( StackServiceComponentRequest request) throws AmbariException { Set<StackServiceComponentResponse> response; String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String serviceName = request.getServiceName(); String componentName = request.getComponentName(); if (componentName != null) { ComponentInfo component = ambariMetaInfo.getComponent(stackName, stackVersion, serviceName, componentName); response = Collections.singleton(new StackServiceComponentResponse( component)); } else { List<ComponentInfo> components = ambariMetaInfo.getComponentsByService(stackName, stackVersion, serviceName); response = new HashSet<StackServiceComponentResponse>(); for (ComponentInfo component: components) { response.add(new StackServiceComponentResponse(component)); } } return response; } @Override public Set<OperatingSystemResponse> getOperatingSystems( Set<OperatingSystemRequest> requests) throws AmbariException { Set<OperatingSystemResponse> response = new HashSet<OperatingSystemResponse>(); for (OperatingSystemRequest request : requests)
{ try { String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); Set<OperatingSystemResponse> stackOperatingSystems = getOperatingSystems(request); for (OperatingSystemResponse operatingSystemResponse : stackOperatingSystems) { if (operatingSystemResponse.getStackName() == null) { operatingSystemResponse.setStackName(stackName); } if (operatingSystemResponse.getStackVersion() == null) { operatingSystemResponse.setStackVersion(stackVersion); } } response.addAll(stackOperatingSystems); } catch (StackAccessException e) { if (requests.size() == 1) { // only throw exception if 1 request. // there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<OperatingSystemResponse> getOperatingSystems( OperatingSystemRequest request) throws AmbariException { Set<OperatingSystemResponse> responses = new HashSet<OperatingSystemResponse>(); String stackName = request.getStackName(); String stackVersion = request.getStackVersion(); String osType = request.getOsType(); Long repositoryVersionId = request.getRepositoryVersionId(); String versionDefinitionId = request.getVersionDefinitionId(); // !!!
 when asking for OperatingSystem responses for a versionDefinition, it is either for // an established repo version (a Long) OR from the in-memory generated ones (a String) if (null == repositoryVersionId && null != versionDefinitionId) { if (NumberUtils.isDigits(versionDefinitionId)) { repositoryVersionId = Long.valueOf(versionDefinitionId); } } if (repositoryVersionId != null) { final RepositoryVersionEntity repositoryVersion = repositoryVersionDAO.findByPK(repositoryVersionId); if (repositoryVersion != null) { for (OperatingSystemEntity operatingSystem: repositoryVersion.getOperatingSystems()) { final OperatingSystemResponse response = new OperatingSystemResponse(operatingSystem.getOsType()); if (null != versionDefinitionId) { response.setVersionDefinitionId(repositoryVersionId.toString()); } else { response.setRepositoryVersionId(repositoryVersionId); } response.setStackName(repositoryVersion.getStackName()); response.setStackVersion(repositoryVersion.getStackVersion()); response.setAmbariManagedRepos(operatingSystem.isAmbariManagedRepos()); responses.add(response); } } } else if (null != versionDefinitionId) { VersionDefinitionXml xml = ambariMetaInfo.getVersionDefinition(versionDefinitionId); if (null == xml) { throw new AmbariException(String.format("Version identified by %s does not exist", versionDefinitionId)); } StackId stackId = new StackId(xml.release.stackId); for (RepositoryXml.Os os : xml.repositoryInfo.getOses()) { OperatingSystemResponse resp = new OperatingSystemResponse(os.getFamily()); resp.setVersionDefinitionId(versionDefinitionId); resp.setStackName(stackId.getStackName()); resp.setStackVersion(stackId.getStackVersion()); responses.add(resp); } } else { if (osType != null) { OperatingSystemInfo operatingSystem = ambariMetaInfo.getOperatingSystem(stackName, stackVersion, osType); responses = Collections.singleton(operatingSystem.convertToResponse()); } else { Set<OperatingSystemInfo> operatingSystems =
 ambariMetaInfo.getOperatingSystems(stackName, stackVersion); for (OperatingSystemInfo operatingSystem : operatingSystems) { responses.add(operatingSystem.convertToResponse()); } } } return responses; } @Override public String getAuthName() { return AuthorizationHelper.getAuthenticatedName(configs.getAnonymousAuditName()); } @Override public Set<RootServiceResponse> getRootServices( Set<RootServiceRequest> requests) throws AmbariException { Set<RootServiceResponse> response = new HashSet<RootServiceResponse>(); for (RootServiceRequest request : requests) { try { response.addAll(getRootServices(request)); } catch (AmbariException e) { if (requests.size() == 1) { // only throw exception if 1 request. // there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<RootServiceResponse> getRootServices (RootServiceRequest request) throws AmbariException{ return rootServiceResponseFactory.getRootServices(request); } @Override public Set<RootServiceComponentResponse> getRootServiceComponents( Set<RootServiceComponentRequest> requests) throws AmbariException { Set<RootServiceComponentResponse> response = new HashSet<RootServiceComponentResponse>(); for (RootServiceComponentRequest request : requests) { String serviceName = request.getServiceName(); try { Set<RootServiceComponentResponse> rootServiceComponents = getRootServiceComponents(request); for (RootServiceComponentResponse serviceComponentResponse : rootServiceComponents) { serviceComponentResponse.setServiceName(serviceName); } response.addAll(rootServiceComponents); } catch (AmbariException e) { if (requests.size() == 1) { // only throw exception if 1 request.
        // (multi-request OR-predicate queries tolerate a missing root service)
// there will be > 1 request in case of OR predicate throw e; } } } return response; } private Set<RootServiceComponentResponse> getRootServiceComponents( RootServiceComponentRequest request) throws AmbariException{ return rootServiceResponseFactory.getRootServiceComponents(request); } @Override public Clusters getClusters() { return clusters; } @Override public ConfigHelper getConfigHelper() { return configHelper; } @Override public AmbariMetaInfo getAmbariMetaInfo() { return ambariMetaInfo; } @Override public ServiceFactory getServiceFactory() { return serviceFactory; } @Override public ServiceComponentFactory getServiceComponentFactory() { return serviceComponentFactory; } @Override public ConfigGroupFactory getConfigGroupFactory() { return configGroupFactory; } @Override public RoleGraphFactory getRoleGraphFactory() { return roleGraphFactory; } @Override public AbstractRootServiceResponseFactory getRootServiceResponseFactory() { return rootServiceResponseFactory; } @Override public ActionManager getActionManager() { return actionManager; } @Override public String getJdkResourceUrl() { return jdkResourceUrl; } @Override public String getJavaHome() { return javaHome; } @Override public String getJDKName() { return jdkName; } @Override public String getJCEName() { return jceName; } @Override public String getServerDB() { return serverDB; } @Override public String getOjdbcUrl() { return ojdbcUrl; } @Override public String getMysqljdbcUrl() { return mysqljdbcUrl; } @Override public Map<String, String> getRcaParameters() { String hostName = StageUtils.getHostName(); String url = configs.getRcaDatabaseUrl(); if (url.contains(Configuration.HOSTNAME_MACRO)) { url = url.replace(Configuration.HOSTNAME_MACRO, hostsMap.getHostMap(hostName)); } Map<String, String> rcaParameters = new HashMap<String, String>(); rcaParameters.put(AMBARI_DB_RCA_URL, url); rcaParameters.put(AMBARI_DB_RCA_DRIVER, configs.getRcaDatabaseDriver()); rcaParameters.put(AMBARI_DB_RCA_USERNAME,
 configs.getRcaDatabaseUser()); rcaParameters.put(AMBARI_DB_RCA_PASSWORD, configs.getRcaDatabasePassword()); return rcaParameters; } @Override public boolean checkLdapConfigured() { return ldapDataPopulator.isLdapEnabled(); } @Override public LdapSyncDto getLdapSyncInfo() throws AmbariException { return ldapDataPopulator.getLdapSyncInfo(); } @Override public boolean isLdapSyncInProgress() { return ldapSyncInProgress; } @Override public synchronized LdapBatchDto synchronizeLdapUsersAndGroups( LdapSyncRequest userRequest, LdapSyncRequest groupRequest) throws AmbariException { ldapSyncInProgress = true; try { final LdapBatchDto batchInfo = new LdapBatchDto(); if (userRequest != null) { switch (userRequest.getType()) { case ALL: ldapDataPopulator.synchronizeAllLdapUsers(batchInfo); break; case EXISTING: ldapDataPopulator.synchronizeExistingLdapUsers(batchInfo); break; case SPECIFIC: ldapDataPopulator.synchronizeLdapUsers(userRequest.getPrincipalNames(), batchInfo); break; } } if (groupRequest != null) { switch (groupRequest.getType()) { case ALL: ldapDataPopulator.synchronizeAllLdapGroups(batchInfo); break; case EXISTING: ldapDataPopulator.synchronizeExistingLdapGroups(batchInfo); break; case SPECIFIC: ldapDataPopulator.synchronizeLdapGroups(groupRequest.getPrincipalNames(), batchInfo); break; } } users.processLdapSync(batchInfo); return batchInfo; } finally { ldapSyncInProgress = false; } } @SuppressWarnings("unchecked") @Override public void initializeWidgetsAndLayouts(Cluster cluster, Service service) throws AmbariException { StackId stackId = cluster.getDesiredStackVersion(); Type widgetLayoutType = new TypeToken<Map<String, List<WidgetLayout>>>(){}.getType(); try { Map<String, Object> widgetDescriptor = null; StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(), stackId.getStackVersion()); if (service != null) { // Service widgets ServiceInfo serviceInfo = stackInfo.getService(service.getName()); File widgetDescriptorFile =
serviceInfo.getWidgetsDescriptorFile(); if (widgetDescriptorFile != null && widgetDescriptorFile.exists()) { try { widgetDescriptor = gson.fromJson(new FileReader(widgetDescriptorFile), widgetLayoutType); } catch (Exception ex) { String msg = "Error loading widgets from file: " + widgetDescriptorFile; LOG.error(msg, ex); throw new AmbariException(msg); } } } else { // Cluster level widgets String widgetDescriptorFileLocation = stackInfo.getWidgetsDescriptorFileLocation(); if (widgetDescriptorFileLocation != null) { File widgetDescriptorFile = new File(widgetDescriptorFileLocation); if (widgetDescriptorFile.exists()) { try { widgetDescriptor = gson.fromJson(new FileReader(widgetDescriptorFile), widgetLayoutType); } catch (Exception ex) { String msg = "Error loading widgets from file: " + widgetDescriptorFile; LOG.error(msg, ex); throw new AmbariException(msg); } } } } if (widgetDescriptor != null) { LOG.debug("Loaded widget descriptor: " + widgetDescriptor); for (Object artifact : widgetDescriptor.values()) { List<WidgetLayout> widgetLayouts = (List<WidgetLayout>) artifact; createWidgetsAndLayouts(cluster, widgetLayouts); } } } catch (Exception e) { throw new AmbariException("Error creating stack widget artifacts. " + (service != null ? 
"Service: " + service.getName() + ", " : "") + "Cluster: " + cluster.getClusterName(), e); } } private WidgetEntity addIfNotExistsWidgetEntity(WidgetLayoutInfo layoutInfo, ClusterEntity clusterEntity, String user, long createTime) { List<WidgetEntity> createdEntities = widgetDAO.findByName(clusterEntity.getClusterId(), layoutInfo.getWidgetName(), user, layoutInfo.getDefaultSectionName()); if (createdEntities == null || createdEntities.isEmpty()) { WidgetEntity widgetEntity = new WidgetEntity(); widgetEntity.setClusterId(clusterEntity.getClusterId()); widgetEntity.setClusterEntity(clusterEntity); widgetEntity.setScope(WidgetResourceProvider.SCOPE.CLUSTER.name()); widgetEntity.setWidgetName(layoutInfo.getWidgetName()); widgetEntity.setDefaultSectionName(layoutInfo.getDefaultSectionName()); widgetEntity.setAuthor(user); widgetEntity.setDescription(layoutInfo.getDescription()); widgetEntity.setTimeCreated(createTime); widgetEntity.setWidgetType(layoutInfo.getType()); widgetEntity.setMetrics(gson.toJson(layoutInfo.getMetricsInfo())); widgetEntity.setProperties(gson.toJson(layoutInfo.getProperties())); widgetEntity.setWidgetValues(gson.toJson(layoutInfo.getValues())); widgetEntity.setListWidgetLayoutUserWidgetEntity(new LinkedList<WidgetLayoutUserWidgetEntity>()); LOG.info("Creating cluster widget with: name = " + layoutInfo.getWidgetName() + ", type = " + layoutInfo.getType() + ", " + "cluster = " + clusterEntity.getClusterName()); // Persisting not visible widgets // visible one will be cascaded on creation of layout if (!layoutInfo.isVisible()) { widgetDAO.create(widgetEntity); } return widgetEntity; } else { LOG.warn("Skip creating widget from stack artifact since one or more " + "already exits with name = " + layoutInfo.getWidgetName() + ", " + "clusterId = " + clusterEntity.getClusterId() + ", user = " + user); } return null; } @Transactional void createWidgetsAndLayouts(Cluster cluster, List<WidgetLayout> widgetLayouts) { String user = "ambari"; Long clusterId = 
cluster.getClusterId(); ClusterEntity clusterEntity = clusterDAO.findById(clusterId); if (clusterEntity == null) { return; } Long now = System.currentTimeMillis(); if (widgetLayouts != null) { for (WidgetLayout widgetLayout : widgetLayouts) { List<WidgetLayoutEntity> existingEntities = widgetLayoutDAO.findByName(clusterId, widgetLayout.getLayoutName(), user); // Update layout properties if the layout exists if (existingEntities == null || existingEntities.isEmpty()) { WidgetLayoutEntity layoutEntity = new WidgetLayoutEntity(); layoutEntity.setClusterEntity(clusterEntity); layoutEntity.setClusterId(clusterId); layoutEntity.setLayoutName(widgetLayout.getLayoutName()); layoutEntity.setDisplayName(widgetLayout.getDisplayName()); layoutEntity.setSectionName(widgetLayout.getSectionName()); layoutEntity.setScope(WidgetLayoutResourceProvider.SCOPE.CLUSTER.name()); layoutEntity.setUserName(user); List<WidgetLayoutUserWidgetEntity> widgetLayoutUserWidgetEntityList = new LinkedList<WidgetLayoutUserWidgetEntity>(); int order = 0; for (WidgetLayoutInfo layoutInfo : widgetLayout.getWidgetLayoutInfoList()) { if (layoutInfo.getDefaultSectionName() == null) { layoutInfo.setDefaultSectionName(layoutEntity.getSectionName()); } WidgetEntity widgetEntity = addIfNotExistsWidgetEntity(layoutInfo, clusterEntity, user, now); // Add to layout if visibility is true and widget was newly added if (widgetEntity != null && layoutInfo.isVisible()) { WidgetLayoutUserWidgetEntity widgetLayoutUserWidgetEntity = new WidgetLayoutUserWidgetEntity(); widgetLayoutUserWidgetEntity.setWidget(widgetEntity); widgetLayoutUserWidgetEntity.setWidgetOrder(order++); widgetLayoutUserWidgetEntity.setWidgetLayout(layoutEntity); widgetLayoutUserWidgetEntityList.add(widgetLayoutUserWidgetEntity); widgetEntity.getListWidgetLayoutUserWidgetEntity().add(widgetLayoutUserWidgetEntity); } } layoutEntity.setListWidgetLayoutUserWidgetEntity(widgetLayoutUserWidgetEntityList); widgetLayoutDAO.createWithFlush(layoutEntity); } 
else { if (existingEntities.size() > 1) { LOG.warn("Skip updating layout since multiple widget layouts " + "found with: name = " + widgetLayout.getLayoutName() + ", " + "user = " + user + ", cluster = " + cluster.getClusterName()); } else { WidgetLayoutEntity existingLayoutEntity = existingEntities.iterator().next(); existingLayoutEntity.setSectionName(widgetLayout.getSectionName()); existingLayoutEntity.setDisplayName(widgetLayout.getDisplayName()); // Add new widgets to end of the existing ones List<WidgetLayoutUserWidgetEntity> layoutUserWidgetEntities = existingLayoutEntity.getListWidgetLayoutUserWidgetEntity(); if (layoutUserWidgetEntities == null) { layoutUserWidgetEntities = new LinkedList<WidgetLayoutUserWidgetEntity>(); existingLayoutEntity.setListWidgetLayoutUserWidgetEntity(layoutUserWidgetEntities); } int order = layoutUserWidgetEntities.size() - 1; List<WidgetLayoutInfo> layoutInfoList = widgetLayout.getWidgetLayoutInfoList(); if (layoutInfoList != null && !layoutInfoList.isEmpty()) { for (WidgetLayoutInfo layoutInfo : layoutInfoList) { WidgetEntity widgetEntity = addIfNotExistsWidgetEntity(layoutInfo, clusterEntity, user, now); if (widgetEntity != null && layoutInfo.isVisible()) { WidgetLayoutUserWidgetEntity widgetLayoutUserWidgetEntity = new WidgetLayoutUserWidgetEntity(); widgetLayoutUserWidgetEntity.setWidget(widgetEntity); widgetLayoutUserWidgetEntity.setWidgetOrder(order++); widgetLayoutUserWidgetEntity.setWidgetLayout(existingLayoutEntity); layoutUserWidgetEntities.add(widgetLayoutUserWidgetEntity); widgetEntity.getListWidgetLayoutUserWidgetEntity().add(widgetLayoutUserWidgetEntity); } } } widgetLayoutDAO.mergeWithFlush(existingLayoutEntity); } } } } } @Override public TimelineMetricCacheProvider getTimelineMetricCacheProvider() { return injector.getInstance(TimelineMetricCacheProvider.class); } @Override public KerberosHelper getKerberosHelper() { return kerberosHelper; } @Override public CredentialStoreService getCredentialStoreService() { 
return credentialStoreService; } /** * Queries the CredentialStoreService to gather properties about it. * <p/> * In particular, the details about which storage facilities are avaialble are returned via Boolean * properties. * * @return a map of properties */ public Map<String,String> getCredentialStoreServiceProperties() { Map<String,String> properties = new HashMap<String, String>(); properties.put("storage.persistent", String.valueOf(credentialStoreService.isInitialized(CredentialStoreType.PERSISTED))); properties.put("storage.temporary", String.valueOf(credentialStoreService.isInitialized(CredentialStoreType.TEMPORARY))); return properties; } /** * Validates that the authenticated user can set a service's (run-as) user and group. * <p/> * If the user is authorized to set service users and groups, than this method exits quickly. * If the user is not authorized to set service users and groups, then this method verifies that * the properties of types USER and GROUP have not been changed. If they have been, an * AuthorizationException is thrown. * * @param cluster the relevant cluster * @param request the configuration request * @throws AuthorizationException if the user is not authorized to perform this operation */ protected void validateAuthorizationToUpdateServiceUsersAndGroups(Cluster cluster, ConfigurationRequest request) throws AuthorizationException { // If the authenticated user is not authorized to set service users or groups, make sure the // relevant properties are not changed. However, if the user is authorized to set service // users and groups, there is nothing to check. 
if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), RoleAuthorization.AMBARI_SET_SERVICE_USERS_GROUPS)) { Map<String, String> requestProperties = request.getProperties(); if (requestProperties != null) { Map<PropertyInfo.PropertyType, Set<String>> propertyTypes = cluster.getConfigPropertiesTypes( request.getType()); // Create a composite set of properties to check... Set<String> propertiesToCheck = new HashSet<String>(); Set<String> userProperties = propertyTypes.get(PropertyType.USER); if (userProperties != null) { propertiesToCheck.addAll(userProperties); } Set<String> groupProperties = propertyTypes.get(PropertyType.GROUP); if (groupProperties != null) { propertiesToCheck.addAll(groupProperties); } // If there are no USER or GROUP type properties, skip the validation check... if (!propertiesToCheck.isEmpty()) { Config existingConfig = cluster.getDesiredConfigByType(request.getType()); Map<String, String> existingProperties = (existingConfig == null) ? null : existingConfig.getProperties(); if (existingProperties == null) { existingProperties = Collections.emptyMap(); } for (String propertyName : propertiesToCheck) { String existingProperty = existingProperties.get(propertyName); String requestProperty = requestProperties.get(propertyName); // If the properties don't match, so thrown an authorization exception if ((existingProperty == null) ? (requestProperty != null) : !existingProperty.equals(requestProperty)) { throw new AuthorizationException("The authenticated user is not authorized to set service user and groups"); } } } } } } }
AMBARI-16630. Extend logging for ActionQueue's retry logic (part2). (Daniel Gergely via stoader)
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
AMBARI-16630. Extend logging for ActionQueue's retry logic (part2). (Daniel Gergely via stoader)
<ide><path>mbari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java <ide> retryEnabled = commandMayBeRetried; <ide> } <ide> } <add> LOG.info("Auto retry setting for {}-{} on {} is retryEnabled={} and retryMaxTime={}", serviceName, componentName, scHost.getHostName(), retryEnabled, retryMaxTime); <ide> } <ide> commandParams.put(MAX_DURATION_OF_RETRIES, Integer.toString(retryMaxTime)); <ide> commandParams.put(COMMAND_RETRY_ENABLED, Boolean.toString(retryEnabled));
Java
epl-1.0
error: pathspec 'tests/com.redhat.ceylon.eclipse.ui.test/src/com/redhat/ceylon/eclipse/ui/test/swtbot/MainBuildTests.java' did not match any file(s) known to git
ea52c31dc7457d0323f2c00d1661920d8c3f05e4
1
rohitmohan96/ceylon-ide-eclipse,rohitmohan96/ceylon-ide-eclipse
package com.redhat.ceylon.eclipse.ui.test.swtbot; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IResource; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.jdt.core.JavaCore; import org.eclipse.jdt.internal.ui.javaeditor.JavaEditor; import org.eclipse.swt.SWT; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swtbot.eclipse.finder.SWTWorkbenchBot; import org.eclipse.swtbot.eclipse.finder.widgets.SWTBotEclipseEditor; import org.eclipse.swtbot.eclipse.finder.widgets.SWTBotEditor; import org.eclipse.swtbot.swt.finder.junit.SWTBotJunit4ClassRunner; import org.eclipse.swtbot.swt.finder.keyboard.KeyboardFactory; import org.eclipse.swtbot.swt.finder.keyboard.KeyboardStrategy; import org.eclipse.swtbot.swt.finder.keyboard.Keystrokes; import org.eclipse.swtbot.swt.finder.widgets.SWTBotLink; import org.hamcrest.CoreMatchers; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.hamcrest.core.IsCollectionContaining; import org.hamcrest.core.IsEqual; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import com.redhat.ceylon.eclipse.code.editor.Util; import com.redhat.ceylon.eclipse.core.builder.CeylonBuilder; import com.redhat.ceylon.eclipse.core.builder.CeylonBuilder.CeylonBuildHook; import com.redhat.ceylon.eclipse.ui.test.AbstractMultiProjectTest; import com.redhat.ceylon.eclipse.ui.test.Utils; @RunWith(SWTBotJunit4ClassRunner.class) public class MainBuildTests extends AbstractMultiProjectTest { private static SWTWorkbenchBot bot; @BeforeClass 
public static void beforeClass() { bot = Utils.createBot(); importAndBuild(); } @After public void resetWorkbench() { Utils.resetWorkbench(bot); } protected void openInEditor(String fileName) { final IFile runFile = mainProject.getFile(fileName); Display.getDefault().syncExec(new Runnable() { public void run() { try { new SWTWorkbenchBot(); Util.gotoLocation(runFile, 0); } catch (Exception ex) { ex.printStackTrace(); } } }); } @Test public void bug589_AddedJavaMethodNotSeen() throws InterruptedException, CoreException { openInEditor("src/mainModule/run.ceylon"); openInEditor("javaSrc/mainModule/JavaClassInCeylonModule_Main_Ceylon_Project.java"); SWTBotEditor editor = bot.editorByTitle("JavaClassInCeylonModule_Main_Ceylon_Project.java"); Assert.assertNotNull(editor); SWTBotEclipseEditor javaFileEditor = editor.toTextEditor(); javaFileEditor.show(); assertEquals("Wrong line 4 in file JavaClassInCeylonModule_Main_Ceylon_Project.java : ", javaFileEditor.getLines().get(3).trim(), "public class JavaClassInCeylonModule_Main_Ceylon_Project {"); String javaEditorText = javaFileEditor.getText(); javaFileEditor.insertText(4, 0, "public void newMethodTotest() {}\n"); Utils.CeylonBuildSummary buildSummary = new Utils.CeylonBuildSummary(mainProjectJDT); buildSummary.install(); javaFileEditor.save(); try { buildSummary.waitForBuildEnd(10); editor = bot.editorByTitle("run.ceylon"); Assert.assertNotNull(editor); SWTBotEclipseEditor ceylonFileEditor = editor.toTextEditor(); ceylonFileEditor.show(); assertEquals("Bad line 18 in run.ceylon : ", ceylonFileEditor.getLines().get(17).trim(), "value v5 = JavaClassInCeylonModule_Main_Ceylon_Project();"); String ceylonEditorText = ceylonFileEditor.getText(); ceylonFileEditor.insertText(18, 0,"v5.newMethodToTest();\n"); /* ceylonFileEditor.navigateTo(18, 3); List<String> proposals = javaFileEditor.getAutoCompleteProposals(""); assertThat("The new method of the Java class should be proposed", proposals, new IsCollectionContaining(new 
IsEqual("test()"))); */ buildSummary = new Utils.CeylonBuildSummary(mainProjectJDT); ceylonFileEditor.save(); try { buildSummary.waitForBuildEnd(10); assertThat("The build should not have any error", Utils.getProjectErrorMarkers(mainProject), Matchers.empty()); } finally { ceylonFileEditor.setText(ceylonEditorText); ceylonFileEditor.saveAndClose(); } } finally { javaFileEditor.setText(javaEditorText); javaFileEditor.saveAndClose(); } } }
tests/com.redhat.ceylon.eclipse.ui.test/src/com/redhat/ceylon/eclipse/ui/test/swtbot/MainBuildTests.java
Test for #589 ( bug589_AddedJavaMethodNotSeen() ).
tests/com.redhat.ceylon.eclipse.ui.test/src/com/redhat/ceylon/eclipse/ui/test/swtbot/MainBuildTests.java
Test for #589 ( bug589_AddedJavaMethodNotSeen() ).
<ide><path>ests/com.redhat.ceylon.eclipse.ui.test/src/com/redhat/ceylon/eclipse/ui/test/swtbot/MainBuildTests.java <add>package com.redhat.ceylon.eclipse.ui.test.swtbot; <add> <add> <add>import static org.hamcrest.MatcherAssert.assertThat; <add>import static org.junit.Assert.assertEquals; <add> <add>import java.util.List; <add>import java.util.Map; <add>import java.util.concurrent.CountDownLatch; <add>import java.util.concurrent.TimeUnit; <add> <add>import org.eclipse.core.resources.IFile; <add>import org.eclipse.core.resources.IMarker; <add>import org.eclipse.core.resources.IResource; <add>import org.eclipse.core.runtime.CoreException; <add>import org.eclipse.core.runtime.IProgressMonitor; <add>import org.eclipse.jdt.core.JavaCore; <add>import org.eclipse.jdt.internal.ui.javaeditor.JavaEditor; <add>import org.eclipse.swt.SWT; <add>import org.eclipse.swt.widgets.Display; <add>import org.eclipse.swt.widgets.Event; <add>import org.eclipse.swtbot.eclipse.finder.SWTWorkbenchBot; <add>import org.eclipse.swtbot.eclipse.finder.widgets.SWTBotEclipseEditor; <add>import org.eclipse.swtbot.eclipse.finder.widgets.SWTBotEditor; <add>import org.eclipse.swtbot.swt.finder.junit.SWTBotJunit4ClassRunner; <add>import org.eclipse.swtbot.swt.finder.keyboard.KeyboardFactory; <add>import org.eclipse.swtbot.swt.finder.keyboard.KeyboardStrategy; <add>import org.eclipse.swtbot.swt.finder.keyboard.Keystrokes; <add>import org.eclipse.swtbot.swt.finder.widgets.SWTBotLink; <add>import org.hamcrest.CoreMatchers; <add>import org.hamcrest.Matcher; <add>import org.hamcrest.Matchers; <add>import org.hamcrest.core.IsCollectionContaining; <add>import org.hamcrest.core.IsEqual; <add>import org.junit.After; <add>import org.junit.Assert; <add>import org.junit.Before; <add>import org.junit.BeforeClass; <add>import org.junit.Test; <add>import org.junit.runner.RunWith; <add> <add>import com.redhat.ceylon.eclipse.code.editor.Util; <add>import com.redhat.ceylon.eclipse.core.builder.CeylonBuilder; <add>import 
com.redhat.ceylon.eclipse.core.builder.CeylonBuilder.CeylonBuildHook; <add>import com.redhat.ceylon.eclipse.ui.test.AbstractMultiProjectTest; <add>import com.redhat.ceylon.eclipse.ui.test.Utils; <add> <add>@RunWith(SWTBotJunit4ClassRunner.class) <add>public class MainBuildTests extends AbstractMultiProjectTest { <add> private static SWTWorkbenchBot bot; <add> <add> @BeforeClass <add> public static void beforeClass() { <add> bot = Utils.createBot(); <add> importAndBuild(); <add> } <add> <add> @After <add> public void resetWorkbench() { <add> Utils.resetWorkbench(bot); <add> } <add> <add> protected void openInEditor(String fileName) { <add> final IFile runFile = mainProject.getFile(fileName); <add> Display.getDefault().syncExec(new Runnable() { <add> public void run() { <add> try { <add> new SWTWorkbenchBot(); <add> Util.gotoLocation(runFile, 0); <add> } catch (Exception ex) { <add> ex.printStackTrace(); <add> } <add> } <add> }); <add> } <add> <add> <add> <add> @Test <add> public void bug589_AddedJavaMethodNotSeen() throws InterruptedException, CoreException { <add> openInEditor("src/mainModule/run.ceylon"); <add> openInEditor("javaSrc/mainModule/JavaClassInCeylonModule_Main_Ceylon_Project.java"); <add> <add> SWTBotEditor editor = bot.editorByTitle("JavaClassInCeylonModule_Main_Ceylon_Project.java"); <add> Assert.assertNotNull(editor); <add> SWTBotEclipseEditor javaFileEditor = editor.toTextEditor(); <add> javaFileEditor.show(); <add> assertEquals("Wrong line 4 in file JavaClassInCeylonModule_Main_Ceylon_Project.java : ", javaFileEditor.getLines().get(3).trim(), "public class JavaClassInCeylonModule_Main_Ceylon_Project {"); <add> String javaEditorText = javaFileEditor.getText(); <add> javaFileEditor.insertText(4, 0, "public void newMethodTotest() {}\n"); <add> <add> Utils.CeylonBuildSummary buildSummary = new Utils.CeylonBuildSummary(mainProjectJDT); <add> buildSummary.install(); <add> javaFileEditor.save(); <add> try { <add> buildSummary.waitForBuildEnd(10); <add> 
<add> editor = bot.editorByTitle("run.ceylon"); <add> Assert.assertNotNull(editor); <add> SWTBotEclipseEditor ceylonFileEditor = editor.toTextEditor(); <add> ceylonFileEditor.show(); <add> assertEquals("Bad line 18 in run.ceylon : ", ceylonFileEditor.getLines().get(17).trim(), "value v5 = JavaClassInCeylonModule_Main_Ceylon_Project();"); <add> String ceylonEditorText = ceylonFileEditor.getText(); <add> ceylonFileEditor.insertText(18, 0,"v5.newMethodToTest();\n"); <add> <add> /* <add> ceylonFileEditor.navigateTo(18, 3); <add> List<String> proposals = javaFileEditor.getAutoCompleteProposals(""); <add> assertThat("The new method of the Java class should be proposed", <add> proposals, <add> new IsCollectionContaining(new IsEqual("test()"))); <add> */ <add> <add> buildSummary = new Utils.CeylonBuildSummary(mainProjectJDT); <add> ceylonFileEditor.save(); <add> try { <add> buildSummary.waitForBuildEnd(10); <add> assertThat("The build should not have any error", <add> Utils.getProjectErrorMarkers(mainProject), <add> Matchers.empty()); <add> } <add> finally { <add> ceylonFileEditor.setText(ceylonEditorText); <add> ceylonFileEditor.saveAndClose(); <add> } <add> } <add> finally { <add> javaFileEditor.setText(javaEditorText); <add> javaFileEditor.saveAndClose(); <add> } <add> } <add> <add>}
Java
apache-2.0
9732e04c60fcaf7f9869024f7676a81af6f25359
0
leventov/druid,pjain1/druid,pjain1/druid,implydata/druid,druid-io/druid,deltaprojects/druid,pjain1/druid,Fokko/druid,Fokko/druid,michaelschiff/druid,druid-io/druid,mghosh4/druid,knoguchi/druid,mghosh4/druid,leventov/druid,mghosh4/druid,nishantmonu51/druid,jon-wei/druid,Fokko/druid,michaelschiff/druid,nishantmonu51/druid,mghosh4/druid,implydata/druid,deltaprojects/druid,nishantmonu51/druid,Fokko/druid,mghosh4/druid,monetate/druid,monetate/druid,implydata/druid,pjain1/druid,himanshug/druid,jon-wei/druid,leventov/druid,monetate/druid,druid-io/druid,nishantmonu51/druid,himanshug/druid,mghosh4/druid,jon-wei/druid,knoguchi/druid,leventov/druid,druid-io/druid,gianm/druid,monetate/druid,pjain1/druid,nishantmonu51/druid,jon-wei/druid,himanshug/druid,knoguchi/druid,michaelschiff/druid,michaelschiff/druid,himanshug/druid,Fokko/druid,jon-wei/druid,deltaprojects/druid,deltaprojects/druid,gianm/druid,gianm/druid,deltaprojects/druid,michaelschiff/druid,implydata/druid,gianm/druid,pjain1/druid,michaelschiff/druid,deltaprojects/druid,monetate/druid,jon-wei/druid,knoguchi/druid,druid-io/druid,gianm/druid,monetate/druid,implydata/druid,monetate/druid,gianm/druid,mghosh4/druid,jon-wei/druid,gianm/druid,himanshug/druid,pjain1/druid,Fokko/druid,knoguchi/druid,michaelschiff/druid,leventov/druid,Fokko/druid,implydata/druid,deltaprojects/druid,nishantmonu51/druid,nishantmonu51/druid
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexer.updater; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Supplier; import org.apache.druid.metadata.MetadataStorageConnectorConfig; import org.apache.druid.metadata.MetadataStorageTablesConfig; import org.apache.druid.metadata.PasswordProvider; import javax.validation.constraints.NotNull; /** */ public class MetadataStorageUpdaterJobSpec implements Supplier<MetadataStorageConnectorConfig> { @JsonProperty("type") @NotNull public String type; @JsonProperty("connectURI") public String connectURI; @JsonProperty("user") public String user; @JsonProperty("password") private PasswordProvider passwordProvider; @JsonProperty("segmentTable") public String segmentTable; public String getSegmentTable() { return segmentTable; } public String getType() { return type; } @Override public MetadataStorageConnectorConfig get() { return new MetadataStorageConnectorConfig() { @Override public String getConnectURI() { return connectURI; } @Override public String getUser() { return user; } @Override public String getPassword() { return passwordProvider == null ? 
null : passwordProvider.getPassword(); } }; } //Note: Currently it only supports configured segmentTable, other tables should be added if needed //by the code using this public MetadataStorageTablesConfig getMetadataStorageTablesConfig() { return new MetadataStorageTablesConfig( null, null, null, segmentTable, null, null, null, null, null, null, null ); } }
indexing-hadoop/src/main/java/org/apache/druid/indexer/updater/MetadataStorageUpdaterJobSpec.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexer.updater; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Supplier; import org.apache.druid.metadata.MetadataStorageConnectorConfig; import org.apache.druid.metadata.MetadataStorageTablesConfig; import org.apache.druid.metadata.PasswordProvider; import javax.validation.constraints.NotNull; /** */ public class MetadataStorageUpdaterJobSpec implements Supplier<MetadataStorageConnectorConfig> { @JsonProperty("type") @NotNull public String type; @JsonProperty("connectURI") public String connectURI; @JsonProperty("user") public String user; @JsonProperty("password") private PasswordProvider passwordProvider; @JsonProperty("segmentTable") public String segmentTable; public String getSegmentTable() { return segmentTable; } public String getType() { return type; } @Override public MetadataStorageConnectorConfig get() { return new MetadataStorageConnectorConfig() { @Override public String getConnectURI() { return connectURI; } @Override public String getUser() { return user; } @Override public String getPassword() { return passwordProvider == null ? 
null : passwordProvider.getPassword(); } }; } //Note: Currently it only supports configured segmentTable, other tables should be added if needed //by the code using this public MetadataStorageTablesConfig getMetadataStorageTablesConfig() { return new MetadataStorageTablesConfig( null, null, segmentTable, null, null, null, null, null, null, null, null ); } }
Pass in segmentTable correctly (#7492)
indexing-hadoop/src/main/java/org/apache/druid/indexer/updater/MetadataStorageUpdaterJobSpec.java
Pass in segmentTable correctly (#7492)
<ide><path>ndexing-hadoop/src/main/java/org/apache/druid/indexer/updater/MetadataStorageUpdaterJobSpec.java <ide> return new MetadataStorageTablesConfig( <ide> null, <ide> null, <add> null, <ide> segmentTable, <del> null, <ide> null, <ide> null, <ide> null,
Java
agpl-3.0
edffcd2beb068fe8b112130f476227591eb1647c
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
d6e8c79e-2e60-11e5-9284-b827eb9e62be
hello.java
d6e3525a-2e60-11e5-9284-b827eb9e62be
d6e8c79e-2e60-11e5-9284-b827eb9e62be
hello.java
d6e8c79e-2e60-11e5-9284-b827eb9e62be
<ide><path>ello.java <del>d6e3525a-2e60-11e5-9284-b827eb9e62be <add>d6e8c79e-2e60-11e5-9284-b827eb9e62be
Java
mit
b45108a84cc70d43b4a1ff163dd3819c5e043553
0
joshua-decoder/thrax,jweese/thrax,tdomhan/thrax,joshua-decoder/thrax,dowobeha/thrax,tdomhan/thrax,jweese/thrax,jweese/thrax,joshua-decoder/thrax,joshua-decoder/thrax,tdomhan/thrax
package edu.jhu.thrax.extraction; import java.util.Collection; import java.util.Arrays; import java.util.Set; import java.util.HashSet; import java.util.ArrayList; import java.util.Queue; import java.util.LinkedList; import edu.jhu.thrax.datatypes.*; import edu.jhu.thrax.util.Vocabulary; import edu.jhu.thrax.features.Feature; import edu.jhu.thrax.ThraxConfig; /** * This class extracts Hiero-style SCFG rules. The inputs that are needed * are "source" "target" and "alignment", which are the source and target * sides of a parallel corpus, and an alignment between each of the sentences. */ public class HieroRuleExtractor implements RuleExtractor { public static String name = "hiero"; public String [] requiredInputs() { return new String [] { ThraxConfig.SOURCE, ThraxConfig.TARGET, ThraxConfig.ALIGNMENT }; } public int INIT_LENGTH_LIMIT = 10; public int SOURCE_LENGTH_LIMIT = 5; public int NT_LIMIT = 2; public int LEXICAL_MINIMUM = 1; public boolean ALLOW_ADJACENT_NTS = false; public boolean ALLOW_LOOSE_BOUNDS = false; public static final String X = "X"; public static final int X_ID = Vocabulary.getId(X); private ArrayList<Feature> features; private int featureLength; /** * Default constructor. All it does is to initialize the list of * features to an empty list. 
*/ public HieroRuleExtractor() { features = new ArrayList<Feature>(); featureLength = 0; ALLOW_ADJACENT_NTS = ThraxConfig.opts.containsKey(ThraxConfig.ADJACENT); ALLOW_LOOSE_BOUNDS = ThraxConfig.opts.containsKey(ThraxConfig.LOOSE); } public Set<Rule> extract(Object [] inputs) { if (inputs.length < 3) { return null; } int [] source = (int []) inputs[0]; int [] target = (int []) inputs[1]; Alignment alignment = (Alignment) inputs[2]; PhrasePair [][] phrasesByStart = initialPhrasePairs(source, target, alignment); Queue<Rule> q = new LinkedList<Rule>(); for (int i = 0; i < source.length; i++) q.offer(new Rule(source, target, alignment, i, NT_LIMIT)); return processQueue(q, phrasesByStart); } public void addFeature(Feature f) { features.add(f); featureLength += f.length(); } public void score(Rule r) { r.scores = new double[featureLength]; int idx = 0; for (Feature f : features) { System.arraycopy(f.score(r), 0, r.scores, idx, f.length()); idx += f.length(); } } protected Set<Rule> processQueue(Queue<Rule> q, PhrasePair [][] phrasesByStart) { Set<Rule> rules = new HashSet<Rule>(); while (q.peek() != null) { Rule r = q.poll(); if (isWellFormed(r)) { for (Rule s : getLabelVariants(r)) { for (Feature feat : features) feat.noteExtraction(s); rules.add(s); } } if (r.appendPoint > phrasesByStart.length - 1) continue; if (phrasesByStart[r.appendPoint] == null) continue; for (PhrasePair pp : phrasesByStart[r.appendPoint]) { if (pp.sourceEnd - r.rhs.sourceStart > INIT_LENGTH_LIMIT || (r.rhs.targetStart >= 0 && pp.targetEnd - r.rhs.targetStart > INIT_LENGTH_LIMIT)) continue; if (r.numNTs < NT_LIMIT && r.numNTs + r.numTerminals < SOURCE_LENGTH_LIMIT && (!r.sourceEndsWithNT || ALLOW_ADJACENT_NTS)) { Rule s = r.copy(); s.extendWithNonterminal(pp); q.offer(s); } if (r.numNTs + r.numTerminals + pp.sourceEnd - pp.sourceStart <= SOURCE_LENGTH_LIMIT && (r.appendPoint == r.rhs.sourceStart || r.sourceEndsWithNT)) { Rule s = r.copy(); s.extendWithTerminals(pp); q.offer(s); } } } return 
rules; } protected boolean isWellFormed(Rule r) { if (r.rhs.targetStart < 0) return false; for (int i = r.rhs.targetStart; i < r.rhs.targetEnd; i++) { if (r.targetLex[i] < 0) return false; } return (r.alignedWords >= LEXICAL_MINIMUM); } private Collection<Rule> variantSet = new HashSet<Rule>(1); protected Collection<Rule> getLabelVariants(Rule r) { variantSet.clear(); r.lhs = X_ID; Arrays.fill(r.nts, X_ID); variantSet.add(r); return variantSet; } private PhrasePair [][] initialPhrasePairs(int [] f, int [] e, Alignment a) { PhrasePair [][] result = new PhrasePair[f.length][]; ArrayList<PhrasePair> list = new ArrayList<PhrasePair>(); for (int i = 0; i < f.length; i++) { list.clear(); int maxlen = f.length - i < INIT_LENGTH_LIMIT ? f.length - i : INIT_LENGTH_LIMIT; for (int len = 1; len <= maxlen; len++) { if (!ALLOW_LOOSE_BOUNDS && (!a.sourceIsAligned(i) || !a.sourceIsAligned(i+len-1))) continue; PhrasePair pp = a.getPairFromSource(i, i+len); if (pp != null && pp.targetEnd - pp.targetStart <= INIT_LENGTH_LIMIT) { list.add(pp); } } result[i] = new PhrasePair[list.size()]; list.toArray(result[i]); } return result; } }
src/edu/jhu/thrax/extraction/HieroRuleExtractor.java
package edu.jhu.thrax.extraction; import java.util.Collection; import java.util.Arrays; import java.util.Set; import java.util.HashSet; import java.util.ArrayList; import java.util.Queue; import java.util.LinkedList; import edu.jhu.thrax.datatypes.*; import edu.jhu.thrax.util.Vocabulary; import edu.jhu.thrax.features.Feature; import edu.jhu.thrax.ThraxConfig; /** * This class extracts Hiero-style SCFG rules. The inputs that are needed * are "source" "target" and "alignment", which are the source and target * sides of a parallel corpus, and an alignment between each of the sentences. */ public class HieroRuleExtractor implements RuleExtractor { public static String name = "hiero"; public String [] requiredInputs() { return new String [] { ThraxConfig.SOURCE, ThraxConfig.TARGET, ThraxConfig.ALIGNMENT }; } public int INIT_LENGTH_LIMIT = 10; public int SOURCE_LENGTH_LIMIT = 5; public int NT_LIMIT = 2; public int LEXICAL_MINIMUM = 1; public boolean ALLOW_ADJACENT_NTS = false; public boolean ALLOW_LOOSE_BOUNDS = false; public static final String X = "X"; public static final int X_ID = Vocabulary.getId(X); private ArrayList<Feature> features; private int featureLength; /** * Default constructor. All it does is to initialize the list of * features to an empty list. 
*/ public HieroRuleExtractor() { features = new ArrayList<Feature>(); featureLength = 0; ALLOW_ADJACENT_NTS = ThraxConfig.opts.containsKey(ThraxConfig.ADJACENT); ALLOW_LOOSE_BOUNDS = ThraxConfig.opts.containsKey(ThraxConfig.LOOSE); } public Set<Rule> extract(Object [] inputs) { if (inputs.length < 3) { return null; } int [] source = (int []) inputs[0]; int [] target = (int []) inputs[1]; Alignment alignment = (Alignment) inputs[2]; PhrasePair [][] phrasesByStart = initialPhrasePairs(source, target, alignment); Queue<Rule> q = new LinkedList<Rule>(); for (int i = 0; i < source.length; i++) q.offer(new Rule(source, target, alignment, i, NT_LIMIT)); return processQueue(q, phrasesByStart); } public void addFeature(Feature f) { features.add(f); featureLength += f.length(); } public void score(Rule r) { r.scores = new double[featureLength]; int idx = 0; for (Feature f : features) { System.arraycopy(f.score(r), 0, r.scores, idx, f.length()); idx += f.length(); } } protected Set<Rule> processQueue(Queue<Rule> q, PhrasePair [][] phrasesByStart) { Set<Rule> rules = new HashSet<Rule>(); while (q.peek() != null) { Rule r = q.poll(); if (isWellFormed(r)) { for (Rule s : getLabelVariants(r)) { for (Feature feat : features) feat.noteExtraction(s); rules.add(s); } } if (r.appendPoint > phrasesByStart.length - 1) continue; if (phrasesByStart[r.appendPoint] == null) continue; for (PhrasePair pp : phrasesByStart[r.appendPoint]) { if (pp.sourceEnd - r.rhs.sourceStart > INIT_LENGTH_LIMIT || (r.rhs.targetStart >= 0 && pp.targetEnd - r.rhs.targetStart > INIT_LENGTH_LIMIT)) continue; if (r.numNTs < NT_LIMIT && r.numNTs + r.numTerminals < SOURCE_LENGTH_LIMIT && (!r.sourceEndsWithNT || ALLOW_ADJACENT_NTS)) { Rule s = r.copy(); s.extendWithNonterminal(pp); q.offer(s); } if (r.numNTs + r.numTerminals + pp.sourceEnd - pp.sourceStart <= SOURCE_LENGTH_LIMIT) { Rule s = r.copy(); s.extendWithTerminals(pp); q.offer(s); } } } return rules; } protected boolean isWellFormed(Rule r) { if 
(r.rhs.targetStart < 0) return false; for (int i = r.rhs.targetStart; i < r.rhs.targetEnd; i++) { if (r.targetLex[i] < 0) return false; } return (r.alignedWords >= LEXICAL_MINIMUM); } private Collection<Rule> variantSet = new HashSet<Rule>(1); protected Collection<Rule> getLabelVariants(Rule r) { variantSet.clear(); r.lhs = X_ID; Arrays.fill(r.nts, X_ID); variantSet.add(r); return variantSet; } private PhrasePair [][] initialPhrasePairs(int [] f, int [] e, Alignment a) { PhrasePair [][] result = new PhrasePair[f.length][]; ArrayList<PhrasePair> list = new ArrayList<PhrasePair>(); for (int i = 0; i < f.length; i++) { list.clear(); int maxlen = f.length - i < INIT_LENGTH_LIMIT ? f.length - i : INIT_LENGTH_LIMIT; for (int len = 1; len <= maxlen; len++) { if (!ALLOW_LOOSE_BOUNDS && (!a.sourceIsAligned(i) || !a.sourceIsAligned(i+len-1))) continue; PhrasePair pp = a.getPairFromSource(i, i+len); if (pp != null && pp.targetEnd - pp.targetStart <= INIT_LENGTH_LIMIT) { list.add(pp); } } result[i] = new PhrasePair[list.size()]; list.toArray(result[i]); } return result; } }
fixed condition for extending rules with terminals If you allow two phrases of terminals to be appended, you end up double-counting some rules, where the larger single phrase (made up of those two phrases) could be appended instead. I changed the condition so that terminals can only be added after a non-terminal symbol and this should fix the problem.
src/edu/jhu/thrax/extraction/HieroRuleExtractor.java
fixed condition for extending rules with terminals
<ide><path>rc/edu/jhu/thrax/extraction/HieroRuleExtractor.java <ide> s.extendWithNonterminal(pp); <ide> q.offer(s); <ide> } <del> if (r.numNTs + r.numTerminals + pp.sourceEnd - pp.sourceStart <= SOURCE_LENGTH_LIMIT) { <add> if (r.numNTs + r.numTerminals + pp.sourceEnd - pp.sourceStart <= SOURCE_LENGTH_LIMIT && <add> (r.appendPoint == r.rhs.sourceStart || r.sourceEndsWithNT)) { <ide> Rule s = r.copy(); <ide> s.extendWithTerminals(pp); <ide> q.offer(s);
Java
apache-2.0
b5615fed3c54f6d2bb9f91d3e3d18629040520c1
0
cuba-platform/cuba,dimone-kun/cuba,cuba-platform/cuba,cuba-platform/cuba,dimone-kun/cuba,dimone-kun/cuba
/* * Copyright (c) 2008-2014 Haulmont. All rights reserved. * Use is subject to license terms, see http://www.cuba-platform.com/license for details. */ package com.haulmont.bali.util; import com.google.common.collect.ImmutableMap; import java.util.Map; /** * Utility class for instantiation immutable Map&lt;String, Object&gt;. <br/> * Null values will be ignored. Null keys are not permitted. * * @author artamonov * @version $Id$ */ public final class ParamsMap { private ParamsMap() { } private static void put(ImmutableMap.Builder<String, Object> builder, String key, Object value) { if (value != null) { builder.put(key, value); } } public static Map<String, Object> of(String paramName, Object paramValue) { ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); put(b, paramName, paramValue); return b.build(); } public static Map<String, Object> of(String paramName1, Object paramValue1, String paramName2, Object paramValue2) { ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); put(b, paramName1, paramValue1); put(b, paramName2, paramValue2); return b.build(); } public static Map<String, Object> of(String paramName1, Object paramValue1, String paramName2, Object paramValue2, String paramName3, Object paramValue3) { ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); put(b, paramName1, paramValue1); put(b, paramName2, paramValue2); put(b, paramName3, paramValue3); return b.build(); } public static Map<String, Object> of(String paramName1, Object paramValue1, String paramName2, Object paramValue2, String paramName3, Object paramValue3, String paramName4, Object paramValue4) { ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); put(b, paramName1, paramValue1); put(b, paramName2, paramValue2); put(b, paramName3, paramValue3); put(b, paramName4, paramValue4); return b.build(); } public static Map<String, Object> of(String paramName1, Object paramValue1, String paramName2, Object paramValue2, String 
paramName3, Object paramValue3, String paramName4, Object paramValue4, String paramName5, Object paramValue5) { ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); put(b, paramName1, paramValue1); put(b, paramName2, paramValue2); put(b, paramName3, paramValue3); put(b, paramName4, paramValue4); put(b, paramName5, paramValue5); return b.build(); } public static Map<String, Object> of(String paramName1, Object paramValue1, String paramName2, Object paramValue2, String paramName3, Object paramValue3, String paramName4, Object paramValue4, String paramName5, Object paramValue5, String paramName6, Object paramValue6) { ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); put(b, paramName1, paramValue1); put(b, paramName2, paramValue2); put(b, paramName3, paramValue3); put(b, paramName4, paramValue4); put(b, paramName5, paramValue5); put(b, paramName6, paramValue6); return b.build(); } }
modules/global/src/com/haulmont/bali/util/ParamsMap.java
/* * Copyright (c) 2008-2014 Haulmont. All rights reserved. * Use is subject to license terms, see http://www.cuba-platform.com/license for details. */ package com.haulmont.bali.util; import com.google.common.collect.ImmutableMap; import java.util.Map; /** * Utility class for instantiation immutable Map&lt;String, Object&gt; * * @author artamonov * @version $Id$ */ public final class ParamsMap { private ParamsMap() { } public static Map<String, Object> of(String paramName, Object value) { return ImmutableMap.of(paramName, value); } public static Map<String, Object> of(String paramName1, Object paramValue1, String paramName2, Object paramValue2) { return ImmutableMap.of(paramName1, paramValue1, paramName2, paramValue2); } public static Map<String, Object> of(String paramName1, Object paramValue1, String paramName2, Object paramValue2, String paramName3, Object paramValue3) { return ImmutableMap.of(paramName1, paramValue1, paramName2, paramValue2, paramName3, paramValue3); } public static Map<String, Object> of(String paramName1, Object paramValue1, String paramName2, Object paramValue2, String paramName3, Object paramValue3, String paramName4, Object paramValue4) { return ImmutableMap.of(paramName1, paramValue1, paramName2,paramValue2, paramName3, paramValue3, paramName4, paramValue4); } public static Map<String, Object> of(String paramName1, Object paramValue1, String paramName2, Object paramValue2, String paramName3, Object paramValue3, String paramName4, Object paramValue4, String paramName5, Object paramValue5) { return ImmutableMap.of(paramName1, paramValue1, paramName2, paramValue2, paramName3, paramValue3, paramName4, paramValue4, paramName5, paramValue5); } }
ParamsMap does not allow nulls in values #PL-4950
modules/global/src/com/haulmont/bali/util/ParamsMap.java
ParamsMap does not allow nulls in values #PL-4950
<ide><path>odules/global/src/com/haulmont/bali/util/ParamsMap.java <ide> import java.util.Map; <ide> <ide> /** <del> * Utility class for instantiation immutable Map&lt;String, Object&gt; <add> * Utility class for instantiation immutable Map&lt;String, Object&gt;. <br/> <add> * Null values will be ignored. Null keys are not permitted. <ide> * <ide> * @author artamonov <ide> * @version $Id$ <ide> private ParamsMap() { <ide> } <ide> <del> public static Map<String, Object> of(String paramName, Object value) { <del> return ImmutableMap.of(paramName, value); <add> private static void put(ImmutableMap.Builder<String, Object> builder, String key, Object value) { <add> if (value != null) { <add> builder.put(key, value); <add> } <add> } <add> <add> public static Map<String, Object> of(String paramName, Object paramValue) { <add> ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); <add> put(b, paramName, paramValue); <add> return b.build(); <ide> } <ide> <ide> public static Map<String, Object> of(String paramName1, Object paramValue1, <ide> String paramName2, Object paramValue2) { <del> return ImmutableMap.of(paramName1, paramValue1, paramName2, paramValue2); <add> ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); <add> put(b, paramName1, paramValue1); <add> put(b, paramName2, paramValue2); <add> return b.build(); <ide> } <ide> <ide> public static Map<String, Object> of(String paramName1, Object paramValue1, <ide> String paramName2, Object paramValue2, <ide> String paramName3, Object paramValue3) { <del> return ImmutableMap.of(paramName1, paramValue1, paramName2, paramValue2, paramName3, paramValue3); <add> ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); <add> put(b, paramName1, paramValue1); <add> put(b, paramName2, paramValue2); <add> put(b, paramName3, paramValue3); <add> return b.build(); <ide> } <ide> <ide> public static Map<String, Object> of(String paramName1, Object paramValue1, <ide> String paramName2, 
Object paramValue2, <ide> String paramName3, Object paramValue3, <ide> String paramName4, Object paramValue4) { <del> return ImmutableMap.of(paramName1, paramValue1, paramName2,paramValue2, <del> paramName3, paramValue3, paramName4, paramValue4); <add> ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); <add> put(b, paramName1, paramValue1); <add> put(b, paramName2, paramValue2); <add> put(b, paramName3, paramValue3); <add> put(b, paramName4, paramValue4); <add> return b.build(); <ide> } <ide> <ide> public static Map<String, Object> of(String paramName1, Object paramValue1, <ide> String paramName3, Object paramValue3, <ide> String paramName4, Object paramValue4, <ide> String paramName5, Object paramValue5) { <del> return ImmutableMap.of(paramName1, paramValue1, paramName2, paramValue2, <del> paramName3, paramValue3, paramName4, paramValue4, paramName5, paramValue5); <add> ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); <add> put(b, paramName1, paramValue1); <add> put(b, paramName2, paramValue2); <add> put(b, paramName3, paramValue3); <add> put(b, paramName4, paramValue4); <add> put(b, paramName5, paramValue5); <add> return b.build(); <add> } <add> <add> public static Map<String, Object> of(String paramName1, Object paramValue1, <add> String paramName2, Object paramValue2, <add> String paramName3, Object paramValue3, <add> String paramName4, Object paramValue4, <add> String paramName5, Object paramValue5, <add> String paramName6, Object paramValue6) { <add> ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>(); <add> put(b, paramName1, paramValue1); <add> put(b, paramName2, paramValue2); <add> put(b, paramName3, paramValue3); <add> put(b, paramName4, paramValue4); <add> put(b, paramName5, paramValue5); <add> put(b, paramName6, paramValue6); <add> return b.build(); <ide> } <ide> }
Java
apache-2.0
98edad84b1d21c66163e6c7be3aba99caf030d2a
0
mrniko/redisson,redisson/redisson
/** * Copyright 2018 Nikita Koksharov * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.redisson.tomcat; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.UUID; import javax.servlet.http.HttpSession; import org.apache.catalina.LifecycleException; import org.apache.catalina.LifecycleState; import org.apache.catalina.Session; import org.apache.catalina.session.ManagerBase; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import org.redisson.Redisson; import org.redisson.api.RMap; import org.redisson.api.RTopic; import org.redisson.api.RedissonClient; import org.redisson.api.listener.MessageListener; import org.redisson.client.codec.Codec; import org.redisson.config.Config; /** * Redisson Session Manager for Apache Tomcat * * @author Nikita Koksharov * */ public class RedissonSessionManager extends ManagerBase { public enum ReadMode {REDIS, MEMORY} public enum UpdateMode {DEFAULT, AFTER_REQUEST} private final Log log = LogFactory.getLog(RedissonSessionManager.class); private RedissonClient redisson; private String configPath; private ReadMode readMode = ReadMode.MEMORY; private UpdateMode updateMode = UpdateMode.DEFAULT; private String keyPrefix = ""; private final String nodeId = UUID.randomUUID().toString(); public String getNodeId() { return nodeId; } public String getUpdateMode() { return updateMode.toString(); } public void setUpdateMode(String 
updateMode) { this.updateMode = UpdateMode.valueOf(updateMode); } public String getReadMode() { return readMode.toString(); } public void setReadMode(String readMode) { this.readMode = ReadMode.valueOf(readMode); } public void setConfigPath(String configPath) { this.configPath = configPath; } public String getConfigPath() { return configPath; } public String getKeyPrefix() { return keyPrefix; } public void setKeyPrefix(String keyPrefix) { this.keyPrefix = keyPrefix; } @Override public String getName() { return RedissonSessionManager.class.getSimpleName(); } @Override public void load() throws ClassNotFoundException, IOException { } @Override public void unload() throws IOException { } @Override public Session createSession(String sessionId) { RedissonSession session = (RedissonSession) createEmptySession(); session.setNew(true); session.setValid(true); session.setCreationTime(System.currentTimeMillis()); session.setMaxInactiveInterval(getContext().getSessionTimeout() * 60); if (sessionId == null) { sessionId = generateSessionId(); } session.setManager(this); session.setId(sessionId); session.save(); return session; } public RMap<String, Object> getMap(String sessionId) { String separator = keyPrefix == null || keyPrefix.isEmpty() ? "" : ":"; final String name = keyPrefix + separator + "redisson:tomcat_session:" + sessionId; return redisson.getMap(name); } public RTopic getTopic() { String separator = keyPrefix == null || keyPrefix.isEmpty() ? 
"" : ":"; final String name = keyPrefix + separator + "redisson:tomcat_session_updates:" + getContext().getName(); return redisson.getTopic(name); } @Override public Session findSession(String id) throws IOException { Session result = super.findSession(id); if (result == null) { if (id != null) { Map<String, Object> attrs = new HashMap<String, Object>(); if (readMode == ReadMode.MEMORY) { attrs = getMap(id).readAllMap(); } else { attrs = getMap(id).getAll(RedissonSession.ATTRS); } if (attrs.isEmpty() || !Boolean.valueOf(String.valueOf(attrs.get("session:isValid")))) { log.info("Session " + id + " can't be found"); return null; } RedissonSession session = (RedissonSession) createEmptySession(); session.setId(id); session.setManager(this); session.load(attrs); session.access(); session.endAccess(); return session; } return null; } result.access(); result.endAccess(); return result; } @Override public Session createEmptySession() { return new RedissonSession(this, readMode, updateMode); } @Override public void remove(Session session, boolean update) { super.remove(session, update); if (session.getIdInternal() != null) { ((RedissonSession)session).delete(); } } public RedissonClient getRedisson() { return redisson; } @Override protected void startInternal() throws LifecycleException { super.startInternal(); redisson = buildClient(); if (updateMode == UpdateMode.AFTER_REQUEST) { getEngine().getPipeline().addValve(new UpdateValve(this)); } if (readMode == ReadMode.MEMORY) { RTopic updatesTopic = getTopic(); updatesTopic.addListener(AttributeMessage.class, new MessageListener<AttributeMessage>() { @Override public void onMessage(CharSequence channel, AttributeMessage msg) { try { // TODO make it thread-safe RedissonSession session = (RedissonSession) RedissonSessionManager.super.findSession(msg.getSessionId()); if (session != null && !msg.getNodeId().equals(nodeId)) { if (msg instanceof AttributeRemoveMessage) { 
session.superRemoveAttributeInternal(((AttributeRemoveMessage)msg).getName(), true); } if (msg instanceof AttributesClearMessage) { RedissonSessionManager.super.remove(session, false); } if (msg instanceof AttributesPutAllMessage) { AttributesPutAllMessage m = (AttributesPutAllMessage) msg; for (Entry<String, Object> entry : m.getAttrs().entrySet()) { session.superSetAttribute(entry.getKey(), entry.getValue(), true); } } if (msg instanceof AttributeUpdateMessage) { AttributeUpdateMessage m = (AttributeUpdateMessage)msg; session.superSetAttribute(m.getName(), m.getValue(), true); } } } catch (IOException e) { log.error("Can't handle topic message", e); } } }); } setState(LifecycleState.STARTING); } protected RedissonClient buildClient() throws LifecycleException { Config config = null; try { config = Config.fromJSON(new File(configPath), getClass().getClassLoader()); } catch (IOException e) { // trying next format try { config = Config.fromYAML(new File(configPath), getClass().getClassLoader()); } catch (IOException e1) { log.error("Can't parse json config " + configPath, e); throw new LifecycleException("Can't parse yaml config " + configPath, e1); } } try { try { Config c = new Config(config); Codec codec = c.getCodec().getClass().getConstructor(ClassLoader.class) .newInstance(Thread.currentThread().getContextClassLoader()); config.setCodec(codec); } catch (Exception e) { throw new IllegalStateException("Unable to initialize codec with ClassLoader parameter", e); } return Redisson.create(config); } catch (Exception e) { throw new LifecycleException(e); } } @Override protected void stopInternal() throws LifecycleException { super.stopInternal(); setState(LifecycleState.STOPPING); try { if (redisson != null) { redisson.shutdown(); } } catch (Exception e) { throw new LifecycleException(e); } } public void store(HttpSession session) throws IOException { if (session == null) { return; } if (updateMode == UpdateMode.AFTER_REQUEST) { RedissonSession sess = 
(RedissonSession) super.findSession(session.getId()); if (sess != null) { sess.save(); } } } }
redisson-tomcat/redisson-tomcat-8/src/main/java/org/redisson/tomcat/RedissonSessionManager.java
/** * Copyright 2018 Nikita Koksharov * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.redisson.tomcat; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.UUID; import javax.servlet.http.HttpSession; import org.apache.catalina.LifecycleException; import org.apache.catalina.LifecycleState; import org.apache.catalina.Session; import org.apache.catalina.session.ManagerBase; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import org.redisson.Redisson; import org.redisson.api.RMap; import org.redisson.api.RTopic; import org.redisson.api.RedissonClient; import org.redisson.api.listener.MessageListener; import org.redisson.client.codec.Codec; import org.redisson.config.Config; /** * Redisson Session Manager for Apache Tomcat * * @author Nikita Koksharov * */ public class RedissonSessionManager extends ManagerBase { public enum ReadMode {REDIS, MEMORY} public enum UpdateMode {DEFAULT, AFTER_REQUEST} private final Log log = LogFactory.getLog(RedissonSessionManager.class); private RedissonClient redisson; private String configPath; private ReadMode readMode = ReadMode.MEMORY; private UpdateMode updateMode = UpdateMode.DEFAULT; private String keyPrefix = ""; private final String nodeId = UUID.randomUUID().toString(); public String getNodeId() { return nodeId; } public String getUpdateMode() { return updateMode.toString(); } public void setUpdateMode(String 
updateMode) { this.updateMode = UpdateMode.valueOf(updateMode); } public String getReadMode() { return readMode.toString(); } public void setReadMode(String readMode) { this.readMode = ReadMode.valueOf(readMode); } public void setConfigPath(String configPath) { this.configPath = configPath; } public String getConfigPath() { return configPath; } public String getKeyPrefix() { return keyPrefix; } public void setKeyPrefix(String keyPrefix) { this.keyPrefix = keyPrefix; } @Override public String getName() { return RedissonSessionManager.class.getSimpleName(); } @Override public void load() throws ClassNotFoundException, IOException { } @Override public void unload() throws IOException { } @Override public Session createSession(String sessionId) { RedissonSession session = (RedissonSession) createEmptySession(); session.setNew(true); session.setValid(true); session.setCreationTime(System.currentTimeMillis()); session.setMaxInactiveInterval(getContext().getSessionTimeout() * 60); if (sessionId == null) { sessionId = generateSessionId(); } session.setManager(this); session.setId(sessionId); session.save(); return session; } public RMap<String, Object> getMap(String sessionId) { String separator = keyPrefix == null || keyPrefix.isEmpty() ? 
"" : ":"; final String name = keyPrefix + separator + "redisson:tomcat_session:" + sessionId; return redisson.getMap(name); } public RTopic getTopic() { return redisson.getTopic("redisson:tomcat_session_updates:" + getContext().getName()); } @Override public Session findSession(String id) throws IOException { Session result = super.findSession(id); if (result == null) { if (id != null) { Map<String, Object> attrs = new HashMap<String, Object>(); if (readMode == ReadMode.MEMORY) { attrs = getMap(id).readAllMap(); } else { attrs = getMap(id).getAll(RedissonSession.ATTRS); } if (attrs.isEmpty() || !Boolean.valueOf(String.valueOf(attrs.get("session:isValid")))) { log.info("Session " + id + " can't be found"); return null; } RedissonSession session = (RedissonSession) createEmptySession(); session.setId(id); session.setManager(this); session.load(attrs); session.access(); session.endAccess(); return session; } return null; } result.access(); result.endAccess(); return result; } @Override public Session createEmptySession() { return new RedissonSession(this, readMode, updateMode); } @Override public void remove(Session session, boolean update) { super.remove(session, update); if (session.getIdInternal() != null) { ((RedissonSession)session).delete(); } } public RedissonClient getRedisson() { return redisson; } @Override protected void startInternal() throws LifecycleException { super.startInternal(); redisson = buildClient(); if (updateMode == UpdateMode.AFTER_REQUEST) { getEngine().getPipeline().addValve(new UpdateValve(this)); } if (readMode == ReadMode.MEMORY) { RTopic updatesTopic = getTopic(); updatesTopic.addListener(AttributeMessage.class, new MessageListener<AttributeMessage>() { @Override public void onMessage(CharSequence channel, AttributeMessage msg) { try { // TODO make it thread-safe RedissonSession session = (RedissonSession) RedissonSessionManager.super.findSession(msg.getSessionId()); if (session != null && !msg.getNodeId().equals(nodeId)) { if (msg 
instanceof AttributeRemoveMessage) { session.superRemoveAttributeInternal(((AttributeRemoveMessage)msg).getName(), true); } if (msg instanceof AttributesClearMessage) { RedissonSessionManager.super.remove(session, false); } if (msg instanceof AttributesPutAllMessage) { AttributesPutAllMessage m = (AttributesPutAllMessage) msg; for (Entry<String, Object> entry : m.getAttrs().entrySet()) { session.superSetAttribute(entry.getKey(), entry.getValue(), true); } } if (msg instanceof AttributeUpdateMessage) { AttributeUpdateMessage m = (AttributeUpdateMessage)msg; session.superSetAttribute(m.getName(), m.getValue(), true); } } } catch (IOException e) { log.error("Can't handle topic message", e); } } }); } setState(LifecycleState.STARTING); } protected RedissonClient buildClient() throws LifecycleException { Config config = null; try { config = Config.fromJSON(new File(configPath), getClass().getClassLoader()); } catch (IOException e) { // trying next format try { config = Config.fromYAML(new File(configPath), getClass().getClassLoader()); } catch (IOException e1) { log.error("Can't parse json config " + configPath, e); throw new LifecycleException("Can't parse yaml config " + configPath, e1); } } try { try { Config c = new Config(config); Codec codec = c.getCodec().getClass().getConstructor(ClassLoader.class) .newInstance(Thread.currentThread().getContextClassLoader()); config.setCodec(codec); } catch (Exception e) { throw new IllegalStateException("Unable to initialize codec with ClassLoader parameter", e); } return Redisson.create(config); } catch (Exception e) { throw new LifecycleException(e); } } @Override protected void stopInternal() throws LifecycleException { super.stopInternal(); setState(LifecycleState.STOPPING); try { if (redisson != null) { redisson.shutdown(); } } catch (Exception e) { throw new LifecycleException(e); } } public void store(HttpSession session) throws IOException { if (session == null) { return; } if (updateMode == UpdateMode.AFTER_REQUEST) { 
RedissonSession sess = (RedissonSession) super.findSession(session.getId()); if (sess != null) { sess.save(); } } } }
[tomcat] Apply `keyPrefix` to both session and top keys I was testing out different codecs in different environments connected to the same redis instance. I provided a different `keyPrefix` in `context.xml` to prevent one environment's codec from messing with another environment's session management. However, since the `keyPrefix` is only applied to the session contents themselves and not the topic subscription, codec errors were still present and causing a lot of log spew. I expected `keyPrefix` to apply not only to the session contents but also to the topic updates, so I'm publishing this PR to see if you guys agree. Here are example of updates for two different codecs with two different `keyPrefix` BEFORE this commit is applied. ```+1544721052.037348 [0 10.0.1.184:40588] "publish" "redisson:tomcat_session_updates:" "{\"@class\":\"org.redisson.tomcat.AttributesPutAllMessage\",\"attrs\":{\"@class\":\"java.util.HashMap\",\"session:thisAccessedTime\":[\"java.lang.Long\",1544721052035],\"session:isNew\":true,\"session:lastAccessedTime\":[\"java.lang.Long\",1544721052035],\"session:maxInactiveInterval\":3600,\"session:isValid\":true,\"session:creationTime\":[\"java.lang.Long\",1544721052035]},\"nodeId\":\"a2a9d075-4b40-4ea4-a8f4-174cb8caaa08\",\"sessionId\":\"1D83362C925593187FB081FE63BD14D0\"}"``` ```+1544721052.300687 [0 10.0.0.213:55652] "publish" "redisson:tomcat_session_updates:" "\xac\xed\x00\x05sr\x00*org.redisson.tomcat.AttributeUpdateMessage\x04\x81\x81\xaa!\xb5\xabE\x02\x00\x02L\x00\x04namet\x00\x12Ljava/lang/String;L\x00\x05valuet\x00\x12Ljava/lang/Object;xr\x00$org.redisson.tomcat.AttributeMessage_\xc4\xf5\xe6~\xa9v\xa3\x02\x00\x02L\x00\x06nodeIdq\x00~\x00\x01L\x00\tsessionIdq\x00~\x00\x01xpt\x00$178c19c3-a5d5-4152-bf99-72c5102c0addt\x00 1CAE476D1DCC75948E8DBD7BCA48FF16t\x00\rsession:isNewsr\x00\x11java.lang.Boolean\xcd r\x80\xd5\x9c\xfa\xee\x02\x00\x01Z\x00\x05valuexp\x00"```
redisson-tomcat/redisson-tomcat-8/src/main/java/org/redisson/tomcat/RedissonSessionManager.java
[tomcat] Apply `keyPrefix` to both session and top keys
<ide><path>edisson-tomcat/redisson-tomcat-8/src/main/java/org/redisson/tomcat/RedissonSessionManager.java <ide> } <ide> <ide> public RTopic getTopic() { <del> return redisson.getTopic("redisson:tomcat_session_updates:" + getContext().getName()); <add> String separator = keyPrefix == null || keyPrefix.isEmpty() ? "" : ":"; <add> final String name = keyPrefix + separator + "redisson:tomcat_session_updates:" + getContext().getName(); <add> return redisson.getTopic(name); <ide> } <ide> <ide> @Override
Java
apache-2.0
4f3b14c4480e3310fe614aa509e0a4bdaf14143e
0
EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci
package uk.ac.ebi.spot.goci.service; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import uk.ac.ebi.spot.goci.model.Association; import uk.ac.ebi.spot.goci.model.Gene; import uk.ac.ebi.spot.goci.model.Region; import uk.ac.ebi.spot.goci.model.RiskAllele; import uk.ac.ebi.spot.goci.model.SingleNucleotidePolymorphism; import uk.ac.ebi.spot.goci.model.Study; import uk.ac.ebi.spot.goci.repository.AssociationRepository; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; /** * Javadocs go here! * * @author Tony Burdett * @date 16/01/15 */ @Service public class AssociationService { private AssociationRepository associationRepository; private StudyService studyService; private SingleNucleotidePolymorphismService snpService; private final Logger log = LoggerFactory.getLogger(getClass()); @Autowired public AssociationService(AssociationRepository associationRepository, StudyService studyService, SingleNucleotidePolymorphismService snpService) { this.associationRepository = associationRepository; this.studyService = studyService; this.snpService = snpService; } protected Logger getLog() { return log; } /** * A facade service around a {@link uk.ac.ebi.spot.goci.repository.AssociationRepository} that retrieves all * associations, and then within the same datasource transaction additionally loads other objects referenced by this * association (so Genes and Regions). * <p> * Use this when you know you will need deep information about a association and do not have an open session that * can be used to lazy load extra data. 
* * @return a list of Associations */ @Transactional(readOnly = true) public List<Association> deepFindAll() { List<Association> allAssociations = associationRepository.findAll(); // iterate over all Associations and grab region info getLog().info("Obtained " + allAssociations.size() + " associations, starting deep load..."); allAssociations.forEach(this::loadAssociatedData); return allAssociations; } @Transactional(readOnly = true) public List<Association> deepFindAll(Sort sort) { List<Association> allAssociations = associationRepository.findAll(sort); // iterate over all Associations and grab region info getLog().info("Obtained " + allAssociations.size() + " associations, starting deep load..."); allAssociations.forEach(this::loadAssociatedData); return allAssociations; } @Transactional(readOnly = true) public Page<Association> deepFindAll(Pageable pageable) { Page<Association> allAssociations = associationRepository.findAll(pageable); // iterate over all Associations and grab region info getLog().info("Obtained " + allAssociations.getSize() + " associations, starting deep load..."); allAssociations.forEach(this::loadAssociatedData); return allAssociations; } public void loadAssociatedData(Association association) { int traitCount = association.getEfoTraits().size(); Study study = studyService.deepFetchOne(association.getStudy()); AtomicInteger reportedGeneCount = new AtomicInteger(); Collection<SingleNucleotidePolymorphism> snps = new HashSet<>(); Collection<Region> regions = new HashSet<>(); Collection<Gene> mappedGenes = new HashSet<>(); association.getLoci().forEach( locus -> { locus.getStrongestRiskAlleles().stream().map(RiskAllele::getSnp).forEach( snp -> { snp.getRegions().forEach(regions::add); snp.getGenomicContexts().forEach(context -> mappedGenes.add(context.getGene())); snps.add(snp); } ); snps.addAll(locus.getStrongestRiskAlleles() .stream() .map(RiskAllele::getSnp) .collect(Collectors.toList())); 
reportedGeneCount.addAndGet(locus.getAuthorReportedGenes().size()); }); getLog().info("Association '" + association.getId() + "' is mapped to " + "" + traitCount + " EFO traits where study id = " + study.getId() + " " + "(author reported " + reportedGeneCount + " gene(s)); " + "this reports on " + snps.size() + " SNPs in " + regions.size() + " regions, " + "mapped to " + mappedGenes.size() + " genes."); } }
goci-core/goci-service/src/main/java/uk/ac/ebi/spot/goci/service/AssociationService.java
package uk.ac.ebi.spot.goci.service; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import uk.ac.ebi.spot.goci.model.Association; import uk.ac.ebi.spot.goci.model.Locus; import uk.ac.ebi.spot.goci.model.RiskAllele; import uk.ac.ebi.spot.goci.model.SingleNucleotidePolymorphism; import uk.ac.ebi.spot.goci.model.Study; import uk.ac.ebi.spot.goci.repository.AssociationRepository; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; /** * Javadocs go here! * * @author Tony Burdett * @date 16/01/15 */ @Service public class AssociationService { private AssociationRepository associationRepository; private StudyService studyService; private SingleNucleotidePolymorphismService snpService; private final Logger log = LoggerFactory.getLogger(getClass()); @Autowired public AssociationService(AssociationRepository associationRepository, StudyService studyService, SingleNucleotidePolymorphismService snpService) { this.associationRepository = associationRepository; this.studyService = studyService; this.snpService = snpService; } protected Logger getLog() { return log; } /** * A facade service around a {@link uk.ac.ebi.spot.goci.repository.AssociationRepository} that retrieves all * associations, and then within the same datasource transaction additionally loads other objects referenced by this * association (so Genes and Regions). * <p> * Use this when you know you will need deep information about a association and do not have an open session that * can be used to lazy load extra data. 
* * @return a list of Associations */ @Transactional(readOnly = true) public List<Association> deepFindAll() { List<Association> allAssociations = associationRepository.findAll(); // iterate over all Associations and grab region info getLog().info("Obtained " + allAssociations.size() + " associations, starting deep load..."); allAssociations.forEach(this::loadAssociatedData); return allAssociations; } @Transactional(readOnly = true) public List<Association> deepFindAll(Sort sort) { List<Association> allAssociations = associationRepository.findAll(sort); // iterate over all Associations and grab region info getLog().info("Obtained " + allAssociations.size() + " associations, starting deep load..."); allAssociations.forEach(this::loadAssociatedData); return allAssociations; } @Transactional(readOnly = true) public Page<Association> deepFindAll(Pageable pageable) { Page<Association> allAssociations = associationRepository.findAll(pageable); // iterate over all Associations and grab region info getLog().info("Obtained " + allAssociations.getSize() + " associations, starting deep load..."); allAssociations.forEach(this::loadAssociatedData); return allAssociations; } public void loadAssociatedData(Association association) { int traitCount = association.getEfoTraits().size(); Study study = studyService.deepFetchOne(association.getStudy()); int reportedGeneCount = 0; Collection<SingleNucleotidePolymorphism> snps = new HashSet<>(); association.getLoci().forEach( locus -> snps.addAll( locus.getStrongestRiskAlleles() .stream() .map(RiskAllele::getSnp) .collect(Collectors.toList()))); getLog().info("Association '" + association.getId() + "' is mapped to " + "" + traitCount + " EFO traits, " + reportedGeneCount + " genes, " + "study id = " + study.getId() + " and " + snps.size() + " SNPs"); } }
getting solr indexing working with new fields
goci-core/goci-service/src/main/java/uk/ac/ebi/spot/goci/service/AssociationService.java
getting solr indexing working with new fields
<ide><path>oci-core/goci-service/src/main/java/uk/ac/ebi/spot/goci/service/AssociationService.java <ide> import org.springframework.stereotype.Service; <ide> import org.springframework.transaction.annotation.Transactional; <ide> import uk.ac.ebi.spot.goci.model.Association; <del>import uk.ac.ebi.spot.goci.model.Locus; <add>import uk.ac.ebi.spot.goci.model.Gene; <add>import uk.ac.ebi.spot.goci.model.Region; <ide> import uk.ac.ebi.spot.goci.model.RiskAllele; <ide> import uk.ac.ebi.spot.goci.model.SingleNucleotidePolymorphism; <ide> import uk.ac.ebi.spot.goci.model.Study; <ide> import java.util.Collection; <ide> import java.util.HashSet; <ide> import java.util.List; <add>import java.util.concurrent.atomic.AtomicInteger; <ide> import java.util.stream.Collectors; <ide> <ide> /** <ide> public void loadAssociatedData(Association association) { <ide> int traitCount = association.getEfoTraits().size(); <ide> Study study = studyService.deepFetchOne(association.getStudy()); <del> int reportedGeneCount = 0; <add> AtomicInteger reportedGeneCount = new AtomicInteger(); <ide> Collection<SingleNucleotidePolymorphism> snps = new HashSet<>(); <add> Collection<Region> regions = new HashSet<>(); <add> Collection<Gene> mappedGenes = new HashSet<>(); <ide> association.getLoci().forEach( <del> locus -> snps.addAll( <del> locus.getStrongestRiskAlleles() <del> .stream() <del> .map(RiskAllele::getSnp) <del> .collect(Collectors.toList()))); <add> locus -> { <add> locus.getStrongestRiskAlleles().stream().map(RiskAllele::getSnp).forEach( <add> snp -> { <add> snp.getRegions().forEach(regions::add); <add> snp.getGenomicContexts().forEach(context -> mappedGenes.add(context.getGene())); <add> snps.add(snp); <add> } <add> ); <add> <add> snps.addAll(locus.getStrongestRiskAlleles() <add> .stream() <add> .map(RiskAllele::getSnp) <add> .collect(Collectors.toList())); <add> reportedGeneCount.addAndGet(locus.getAuthorReportedGenes().size()); <add> }); <ide> getLog().info("Association '" + 
association.getId() + "' is mapped to " + <del> "" + traitCount + " EFO traits, " + reportedGeneCount + " genes, " + <del> "study id = " + study.getId() + " and " + snps.size() + " SNPs"); <add> "" + traitCount + " EFO traits where study id = " + study.getId() + " " + <add> "(author reported " + reportedGeneCount + " gene(s)); " + <add> "this reports on " + snps.size() + " SNPs in " + regions.size() + " regions, " + <add> "mapped to " + mappedGenes.size() + " genes."); <ide> } <ide> }
Java
mit
error: pathspec 'src/main/java/net/secknv/nkmod/blocks/GrinderBlockContainer.java' did not match any file(s) known to git
afd94acb72a140c98ed41732d21f51f9866ea878
1
secknv/Naschkatze
package net.secknv.nkmod.blocks; import net.minecraft.entity.player.PlayerEntity; import net.minecraft.entity.player.PlayerInventory; import net.minecraft.inventory.container.Container; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.IWorldPosCallable; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.minecraftforge.items.CapabilityItemHandler; import net.minecraftforge.items.IItemHandler; import net.minecraftforge.items.SlotItemHandler; import net.minecraftforge.items.wrapper.InvWrapper; import static net.secknv.nkmod.RegistryHandler.GRINDER; import static net.secknv.nkmod.RegistryHandler.GRINDER_CONTAINER; public class GrinderBlockContainer extends Container { private TileEntity tileEntity; private PlayerEntity playerEntity; private IItemHandler playerInventory; public GrinderBlockContainer(int windowId, World world, BlockPos pos, PlayerInventory playerInventory, PlayerEntity player) { super(GRINDER_CONTAINER.get(), windowId); tileEntity = world.getTileEntity(pos); this.playerEntity = player; this.playerInventory = new InvWrapper(playerInventory); if (tileEntity != null) { tileEntity.getCapability(CapabilityItemHandler.ITEM_HANDLER_CAPABILITY).ifPresent(h -> { // Furnace input slot addSlot(new SlotItemHandler(h, 0, 56, 17)); // Furnace fuel slot // todo: item handler for fuel //addSlot(new SlotItemHandler(h, 1, 56, 53)); // Furnace result slot // todo: item handler / code for output //addSlot(new SlotItemHandler(h, 2, 116, 35)); }); } layoutPlayerInventorySlots(8, 84); } @Override public boolean canInteractWith(PlayerEntity playerIn) { return isWithinUsableDistance(IWorldPosCallable.of(tileEntity.getWorld(), tileEntity.getPos()), playerEntity, GRINDER.get()); } private int addSlotRange(IItemHandler handler, int index, int x, int y, int amount, int dx) { for (int i = 0; i < amount; i++) { addSlot(new SlotItemHandler(handler, index, x, y)); x+=dx; index++; } return index; } private int 
addSlotBox(IItemHandler handler, int index, int x, int y, int horAmount, int dx, int verAmount, int dy) { for (int j = 0; j < verAmount; j++) { index = addSlotRange(handler, index, x, y, horAmount, dx); y+=dy; } return index; } private void layoutPlayerInventorySlots(int leftCol, int topRow) { // Player inventory addSlotBox(playerInventory, 9, leftCol, topRow, 9, 18, 3, 18); // Hotbar topRow += 58; addSlotRange(playerInventory, 0, leftCol, topRow, 9, 18); } }
src/main/java/net/secknv/nkmod/blocks/GrinderBlockContainer.java
Added grinder container
src/main/java/net/secknv/nkmod/blocks/GrinderBlockContainer.java
Added grinder container
<ide><path>rc/main/java/net/secknv/nkmod/blocks/GrinderBlockContainer.java <add>package net.secknv.nkmod.blocks; <add> <add>import net.minecraft.entity.player.PlayerEntity; <add>import net.minecraft.entity.player.PlayerInventory; <add>import net.minecraft.inventory.container.Container; <add>import net.minecraft.tileentity.TileEntity; <add>import net.minecraft.util.IWorldPosCallable; <add>import net.minecraft.util.math.BlockPos; <add>import net.minecraft.world.World; <add>import net.minecraftforge.items.CapabilityItemHandler; <add>import net.minecraftforge.items.IItemHandler; <add>import net.minecraftforge.items.SlotItemHandler; <add>import net.minecraftforge.items.wrapper.InvWrapper; <add> <add>import static net.secknv.nkmod.RegistryHandler.GRINDER; <add>import static net.secknv.nkmod.RegistryHandler.GRINDER_CONTAINER; <add> <add>public class GrinderBlockContainer extends Container { <add> <add> private TileEntity tileEntity; <add> private PlayerEntity playerEntity; <add> private IItemHandler playerInventory; <add> <add> public GrinderBlockContainer(int windowId, World world, BlockPos pos, PlayerInventory playerInventory, PlayerEntity player) { <add> super(GRINDER_CONTAINER.get(), windowId); <add> tileEntity = world.getTileEntity(pos); <add> this.playerEntity = player; <add> this.playerInventory = new InvWrapper(playerInventory); <add> <add> if (tileEntity != null) { <add> tileEntity.getCapability(CapabilityItemHandler.ITEM_HANDLER_CAPABILITY).ifPresent(h -> { <add> // Furnace input slot <add> addSlot(new SlotItemHandler(h, 0, 56, 17)); <add> // Furnace fuel slot <add> // todo: item handler for fuel <add> //addSlot(new SlotItemHandler(h, 1, 56, 53)); <add> // Furnace result slot <add> // todo: item handler / code for output <add> //addSlot(new SlotItemHandler(h, 2, 116, 35)); <add> <add> }); <add> } <add> <add> layoutPlayerInventorySlots(8, 84); <add> } <add> <add> @Override <add> public boolean canInteractWith(PlayerEntity playerIn) { <add> return 
isWithinUsableDistance(IWorldPosCallable.of(tileEntity.getWorld(), tileEntity.getPos()), playerEntity, GRINDER.get()); <add> } <add> <add> private int addSlotRange(IItemHandler handler, int index, int x, int y, int amount, int dx) { <add> for (int i = 0; i < amount; i++) { <add> addSlot(new SlotItemHandler(handler, index, x, y)); <add> x+=dx; <add> index++; <add> } <add> return index; <add> } <add> <add> private int addSlotBox(IItemHandler handler, int index, int x, int y, int horAmount, int dx, int verAmount, int dy) { <add> for (int j = 0; j < verAmount; j++) { <add> index = addSlotRange(handler, index, x, y, horAmount, dx); <add> y+=dy; <add> } <add> return index; <add> } <add> <add> private void layoutPlayerInventorySlots(int leftCol, int topRow) { <add> // Player inventory <add> addSlotBox(playerInventory, 9, leftCol, topRow, 9, 18, 3, 18); <add> // Hotbar <add> topRow += 58; <add> addSlotRange(playerInventory, 0, leftCol, topRow, 9, 18); <add> } <add>}
Java
mit
04c7b481a47c328fd46f60314fb781c9cf9f25a4
0
sandeepsas/PhonePark
package com.uic.sandeep.phonepark; import android.annotation.SuppressLint; import android.app.*; import android.app.PendingIntent; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.content.*; import android.graphics.*; import android.hardware.*; import android.location.*; import android.location.LocationManager; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.os.*; import android.provider.Settings; import android.speech.tts.TextToSpeech; import android.support.multidex.MultiDex; import android.support.v4.app.FragmentActivity; import android.support.v4.content.LocalBroadcastManager; import android.text.Layout; import android.text.method.ScrollingMovementMethod; import android.util.Log; import android.view.*; import android.widget.*; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesUtil; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.PendingResult; import com.google.android.gms.common.api.Status; import com.google.android.gms.location.DetectedActivity; import com.google.android.gms.location.LocationListener; import com.google.android.gms.location.LocationRequest; import com.google.android.gms.location.LocationServices; import com.google.android.gms.maps.CameraUpdateFactory; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.MapFragment; import com.google.android.gms.maps.OnMapReadyCallback; import com.google.android.gms.maps.model.BitmapDescriptorFactory; import com.google.android.gms.maps.model.CameraPosition; import com.google.android.gms.maps.model.LatLng; import com.google.android.gms.maps.model.Marker; import com.google.android.gms.maps.model.MarkerOptions; import com.google.android.gms.maps.model.Polygon; import com.google.android.gms.maps.model.PolygonOptions; import com.google.android.gms.maps.model.Polyline; import 
com.google.android.gms.maps.model.PolylineOptions; import com.google.android.gms.nearby.connection.AppMetadata; import com.google.android.gms.nearby.connection.Connections; import com.skyhookwireless.wps.WPSContinuation; import com.skyhookwireless.wps.WPSLocation; import com.skyhookwireless.wps.WPSLocationCallback; import com.skyhookwireless.wps.WPSReturnCode; import com.uic.sandeep.phonepark.MotionState.Source; import com.uic.sandeep.phonepark.MotionState.Type; import com.uic.sandeep.phonepark.blocksmap.ParkingBlock; import com.uic.sandeep.phonepark.bluetooth.BTPendingDetection; import com.uic.sandeep.phonepark.bluetooth.BluetoothConnectionService; import com.uic.sandeep.phonepark.classification.ClassificationManager; import com.uic.sandeep.phonepark.classification.WekaClassifier; import com.uic.sandeep.phonepark.fusion.FusionManager; import com.uic.sandeep.phonepark.googleacitvityrecognition.GoogleActivityRecognitionClientRemover; import com.uic.sandeep.phonepark.googleacitvityrecognition.GoogleActivityRecognitionClientRequester; import com.uic.sandeep.phonepark.indicator.accelerometerbased.AccelerometerFeature; import com.uic.sandeep.phonepark.indicator.iodetectors.CellTowerChart; import com.uic.sandeep.phonepark.indicator.iodetectors.DetectionProfile; import com.uic.sandeep.phonepark.indicator.iodetectors.LightChart; import com.uic.sandeep.phonepark.indicator.iodetectors.MagnetChart; import com.uic.sandeep.phonepark.managers.AudioRecordManager; import com.uic.sandeep.phonepark.managers.EventDetectionNotificationManager; import com.uic.sandeep.phonepark.managers.LogManager; import com.uic.sandeep.phonepark.managers.WakeLockManager; import com.uic.sandeep.phonepark.sensorlist.Sensors; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.*; /** *Sample application that demonstrates the use of *ActivityRecognitionClient}.It registers for activity detection updates *at a rate of 20seconds,logs them to a file,and displays the detected 
*activities with their associated confidence levels. *<p> *An IntentService receives activity detection updates in the background *so that detection can continue even if the Activity is not visible. */ public class MainActivity extends FragmentActivity implements Connections, GoogleApiClient.OnConnectionFailedListener, TextToSpeech.OnInitListener, GoogleApiClient.ConnectionCallbacks, OnMapReadyCallback { private static List<LatLng> PBRoutes; public static void setPBRoutes(List<LatLng> PBRoutes) { MainActivity.PBRoutes = PBRoutes; } protected void attachBaseContext(Context base) { super.attachBaseContext(base); MultiDex.install(this); } // Google Api Client public static GoogleApiClient mGoogleApiClient; public static boolean isParked = false; /** * Stores parameters for requests to the FusedLocationProviderApi. */ protected LocationRequest mLocationRequest; private static Context mContext; protected LocationRequest mParkingLocationRequest; // Unique ID for the User public static String userID; public static final String LOG_TAG=MainActivity.class.getCanonicalName(); private static final String LOCK_TAG="ACCELEROMETER_MONITOR"; private BTPendingDetection pendingBTDetection = null; private int currentTransportationMode = DetectedActivity.UNKNOWN; private boolean onCreateCalled = false; /** * UI Widgets */ // Holds the text view public static TextView text_navigation; public static TextView text_parking_info; public static ImageButton reportParkDepark; private static TextView consoleTextView, environTextView, stateTextView, googleStateTextView; public static final String ENVIRONMENT_PREFIX="Environment : "; public static final String STATE_PREFIX="Motion State Classified : "; public static final String GOOGLE_MOBILITY_STATE_PREFIX="Motion State Google : "; public static final String INDICATOR_PREFIX="Indicator : "; public static GoogleMap mMap; public static Polyline currentPolyline = null; public static Marker[] currentMarkers = null; public static TextToSpeech 
mSpeech; static Vector<ParkingBlock> parkingBlocks = null; static Vector<ParkingBlock> nearestParkingBlocks = null; static Vector<ParkingBlock> cached_nearestParkingBlocks = null; public static List<LatLng> pb_route_list = new ArrayList<LatLng>(); /** * Holds activity recognition data, in the form of * strings that can contain markup */ //private ArrayAdapter<Spanned> mStatusAdapter; //Instance of a Bluetooth adapter private BluetoothAdapter mBluetoothAdapter; /** * Intent filter for incoming broadcasts from the * IntentService. */ IntentFilter mBroadcastFilter; // Instance of a local broadcast manager private LocalBroadcastManager mBroadcastManager; //Instance of a customized location manager private LocationManager mLocationManager; // The logger object private LogManager mLogManager; // Instance of customized notification manager private EventDetectionNotificationManager mEventDetectionNotificationManager; // The wake lock manager object private WakeLockManager mWakeLockManager; /** * Google Activity Update Fields */ private GoogleActivityRecognitionClientRequester mGoogleActivityDetectionRequester; private GoogleActivityRecognitionClientRemover mGoogleActivityDetectionRemover; private double[] probOfOnFootAndInVehicleOfLastUpdate=new double[2]; /** * MST */ private PastMotionStates mPastGoogleActivities=new PastMotionStates(Source.Google, Constants.GOOGLE_ACTIVITY_LAST_STATE_NO); private PastMotionStates mPastClassifiedMotionStates=new PastMotionStates(Source.Classifier, Constants.NO_OF_PAST_STATES_STORED); private CachedDetectionList mCachedUnparkingDetectionList=new CachedDetectionList(CachedDetection.Type.Unparking); private CachedDetectionList mCachedParkingDetectionList=new CachedDetectionList(CachedDetection.Type.Parking); private double[] lastClassifiedMotionStateDistr=null; private double[] lastAccReading; private SensorManager mSensorManager; private Sensor mAccelerometer; private AudioRecordManager mAudioRecordManager; private FusionManager 
mFusionManager; /** * Detection Interval Fields */ private long lastParkingTimestamp=-1; private long lastUnparkingTimestamp=-1; /** * IODetector fields */ private CellTowerChart cellTowerChart; private LightChart lightChart; private MagnetChart magnetChart; private Handler mIODectorHandler; private boolean aggregationFinish = true; private boolean phoneNotStill = false; private int lastEnvironment=Constants.ENVIRON_UNKNOWN; private double probabilityOfLastEnvironment; private ArrayList<Integer> pastEnvironments=new ArrayList<Integer>(); private int reportGlobalNumber = 0; /** * Indicator Fusion */ @SuppressLint("UseSparseArrays") private HashMap<Integer, ArrayList<Double>> lastVectors=new HashMap<Integer, ArrayList<Double>>(); // The classification manager object private ClassificationManager mClassificationManager; // Store the current request type (ADD or REMOVE) private Constants.REQUEST_TYPE mRequestType; private SensorManager mSensorManageForMap; private Sensor accelerometer; private Sensor magnetometer; private float[] mGravity; private float[] mGeomagnetic; private SensorEventListener mSensorListnerForMap = new SensorEventListener(){ @Override public void onSensorChanged(SensorEvent event) { if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) mGravity = event.values; if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) mGeomagnetic = event.values; if (mGravity != null && mGeomagnetic != null) { float R[] = new float[9]; float I[] = new float[9]; boolean success = SensorManager.getRotationMatrix(R, I, mGravity, mGeomagnetic); if (success) { float orientation[] = new float[3]; SensorManager.getOrientation(R, orientation); float azimut = orientation[0]; // orientation contains: azimut, pitch and roll azimuthInDegress = (float)Math.toDegrees(azimut); if (azimuthInDegress < 0.0f) { azimuthInDegress += 360.0f; } } } } @Override public void onAccuracyChanged(Sensor sensor, int accuracy) { } }; private static Location currentLoc; private static float 
azimuthInDegress =0; public static void adjCameraMap(Location loci1) { currentLoc = loci1; if(currentLoc.getSpeed()<10){ CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(currentLoc.getLatitude(), currentLoc.getLongitude()))// Sets the center of the map to Mountain View .bearing(azimuthInDegress).tilt(30).zoom(17).build(); MainActivity.mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); }else{ CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(currentLoc.getLatitude(), currentLoc.getLongitude()))// Sets the center of the map to Mountain View .bearing(currentLoc.getBearing()).tilt(30).zoom(17).build(); MainActivity.mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); } } public class BluetoothLocationClientListener implements LocationListener { int eventCode; public BluetoothLocationClientListener(int eventCode){ this.eventCode=eventCode; } @Override public void onLocationChanged(Location location) { //(new GetAddressTask(eventCode)).execute(location); BTParkingLocationReceived(eventCode, location, null); } } @Override public void onMapReady(final GoogleMap map) { this.mMap = map; mMap.setMyLocationEnabled(true); LatLng chicago = new LatLng(41.88, -87.62); mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(chicago, 13)); Polygon bbx = mMap.addPolygon(new PolygonOptions() .add(new LatLng(41.884, -87.6245), new LatLng(41.8840, -87.6636), new LatLng(41.8677,-87.6641), new LatLng(41.8658, -87.6639), new LatLng(41.8633, -87.6614), new LatLng(41.8631, -87.6254) ) .strokeColor(Color.RED)); } public class ParkingSearchClientListener implements LocationListener { public ParkingSearchClientListener(){ } @Override public void onLocationChanged(Location location) { mMap.clear(); Polygon bbx = mMap.addPolygon(new PolygonOptions() .add(new LatLng(41.884, -87.6245), new LatLng(41.8840, -87.6636), new LatLng(41.8677,-87.6641), new LatLng(41.8658, -87.6639), new LatLng(41.8633, 
                     -87.6614),
                     new LatLng(41.8631, -87.6254) ).zIndex(10)
                .strokeColor(Color.RED));
        // Face the camera along the direction of travel at street-level zoom.
        CameraPosition cameraPosition = new CameraPosition.Builder()
                .target(new LatLng(location.getLatitude(), location.getLongitude()) )// Sets the center of the map to Mountain View
                .bearing(location.getBearing()).tilt(30).zoom(17).build();
        mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition));
        // Query and render the nearest parking blocks asynchronously.
        DisplayNearestParkBlock displayNearestParkBlock = new DisplayNearestParkBlock(location);
        displayNearestParkBlock.execute();
    }
}

/**
 * Entry point for Bluetooth-triggered detections. An UNPARKING event is acted
 * on immediately only if the activity recognizer already reports IN_VEHICLE;
 * otherwise it is parked in pendingBTDetection until that confirmation arrives
 * (see the broadcast receiver). Any other event (parking) is acted on at once
 * and clears a pending detection.
 */
public void BTParkingLocationReceived(int eventCode, Location location, String address) {
    if(eventCode==Constants.OUTCOME_UNPARKING){
        if (currentTransportationMode == DetectedActivity.IN_VEHICLE) {
            actionsOnBTDetection(eventCode, location, null);
        } else {
            pendingBTDetection = new BTPendingDetection(eventCode, location);
            Toast.makeText(getApplicationContext(), "btdetection pending", Toast.LENGTH_SHORT).show();
        }
    }else{
        pendingBTDetection = null;
        actionsOnBTDetection(eventCode, location, null);
    }
}

// One-shot location listener used when sensor fusion (not Bluetooth) detects an event.
public class FusionLocationClientListener implements LocationListener{
    int eventCode; // Constants.OUTCOME_PARKING or Constants.OUTCOME_UNPARKING

    public FusionLocationClientListener(int eventCode){
        this.eventCode=eventCode;
    }

    @Override
    public void onLocationChanged(Location location) {
        //(new GetAddressTask(eventCode)).execute(location);
        onLocationRetrieved(eventCode, location, null);
    }
}

// actions taken when a parking/unparking event is detected and the location of the event is retrieved:
// announce via TTS + toast, report to the server, log, and re-center the map.
private void actionsOnBTDetection(int eventCode, Location location, String address){
    //latestLocation=getLatestLocationFromIndividualProvider(location);
    int resID;          // sound resource, also reused below as the park/depark discriminator
    String prefix;      // log-message prefix
    float markerColor;  // marker hue (markers currently commented out)
    if(eventCode==Constants.OUTCOME_PARKING){
        resID=R.raw.vehicle_parked;
        prefix=Constants.PARKING_NOTIFICATION;
        markerColor= BitmapDescriptorFactory.HUE_AZURE;
    }else{
        resID=R.raw.vehicle_deparked;
        prefix=Constants.UNPARKING_NOTIFICATION;
        markerColor=BitmapDescriptorFactory.HUE_RED;
    }
    // Timestamp of the event, taken from the location fix (not wall-clock "now").
    //String curTimeString=CommonUtils.formatTimestamp(new Date(),formatTemplate);
    String curTimeString=CommonUtils.formatTimestamp( new Date(location.getTime()), "ddMMyyyyhhmmss" );
    Log.e(LOG_TAG, curTimeString + " \n" + location.toString());
    /*
     * actions
     */
    //1. send the text notification
    //String notificationMsg=prefix+" "+curTimeString;
    //if(address!=null) notificationMsg+=address;
    //mEventDetectionNotificationManager.sendTextNotification(notificationMsg);
    //Toast.makeText(getApplicationContext(), notificationMsg, 2).show();
    //2. play the sound
    //mEventDetectionNotificationManager.playVoiceNotification(resID);
    reportGlobalNumber++;
    if (resID==R.raw.vehicle_parked) {
        // Speak the announcement twice (repetition aids audibility), toast,
        // then report the parking (last arg 1 = parked) to the server.
        String announcementOnce = reportGlobalNumber + " Bluetooth detected parking ";
        String announcement = announcementOnce + announcementOnce;// + announcementOnce;
        //mSpeech.speak(reportGlobalNumber + " Blue tooth detected parking at " + curTimeString, TextToSpeech.QUEUE_ADD, null);
        mSpeech.stop();
        mSpeech.speak(announcement, TextToSpeech.QUEUE_ADD, null);
        Toast.makeText(getApplicationContext(), "Bluetooth detected parking", Toast.LENGTH_LONG).show();
        SendParkReport sendPark = new SendParkReport(location, curTimeString, 1);
        sendPark.execute();
        isParked = true;
    }
    if (resID==R.raw.vehicle_deparked) {
        // Mirror of the parking branch (last arg 0 = departed).
        String announcementOnce = reportGlobalNumber + " Bluetooth detected leaving parking space ";
        String announcement = announcementOnce + announcementOnce;// + announcementOnce;
        //mSpeech.speak(reportGlobalNumber + " Blue tooth detected leaving parking space at " + curTimeString, TextToSpeech.QUEUE_ADD, null);
        mSpeech.stop();
        mSpeech.speak(announcement, TextToSpeech.QUEUE_ADD, null);
        Toast.makeText(getApplicationContext(), "Bluetooth detected leaving parking space", Toast.LENGTH_LONG).show();
        SendParkReport sendDePark = new SendParkReport(location,curTimeString,0);
        sendDePark.execute();
        isParked = false;
    }
    //3. log the address of event
    String logMsg=prefix+"\n Location retrieval time:"+curTimeString+"\nlocation:"+location.toString()+"\n";
    if(address!=null){
        //logMsg+=address+"\n";
        //logMsg+=pastEnvironments.toString()+"\n"+pastMotionStates+"\n";
    }
    mLogManager.log(logMsg, Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_DETECTION_REPORT]);
    //4. show on the map
    // Clear any previously drawn route/markers before re-centering.
    if(currentPolyline!=null){
        currentPolyline.remove();
    }
    if(currentMarkers!=null){
        for (int k =0;k<currentMarkers.length;k++){
            currentMarkers[k].remove();
        }
    }
    //mEventDetectionNotificationManager.addMarkersToMap(mMap, curTimeString, prefix
    //    , location.getLatitude(), location.getLongitude(), location.getAltitude(), markerColor);
    //center and zoom in the map
    CameraPosition cameraPosition = new CameraPosition.Builder()
            .target(new LatLng(location.getLatitude(), location.getLongitude()) )// Sets the center of the map to Mountain View
            .bearing(location.getBearing()).tilt(30).zoom(17).build();
    mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition));
    //5. update availability display
    //updateAvailabilityDisplay(eventCode, location);
    //add a marker on the map
    Log.e(LOG_TAG, "operations on map completed");
    // Redraw the monitored-area bounding polygon (mMap.clear()/removals above may
    // have discarded it).
    Polygon bbx = mMap.addPolygon(new PolygonOptions()
            .add(new LatLng(41.884, -87.6245),
                 new LatLng(41.8840, -87.6636),
                 new LatLng(41.8677,-87.6641),
                 new LatLng(41.8658, -87.6639),
                 new LatLng(41.8633, -87.6614),
                 new LatLng(41.8631, -87.6254) )
            .strokeColor(Color.RED));
}

/** True when the device currently has a connected network (any transport). */
public boolean isNetworkAvailable() {
    ConnectivityManager connectivityManager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
    NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
    return activeNetworkInfo != null && activeNetworkInfo.isConnected();
}

/**
 * Haversine great-circle distance between the vehicle at (cLat, cLon) and an
 * intersection at (iLat, iLong). Earth radius 3958.75 means the result is in
 * miles.
 */
public double distFromVehicleToIntersection(double cLat,double cLon,double iLat,double iLong) {
    double earthRadius = 3958.75;
    double dLat = Math.toRadians(iLat-cLat);
    double dLng = Math.toRadians(iLong-cLon);
    double sindLat = Math.sin(dLat / 2);
    double sindLng = Math.sin(dLng / 2);
    double a = Math.pow(sindLat, 2) + Math.pow(sindLng, 2) * Math.cos(Math.toRadians(cLat)) * Math.cos(Math.toRadians(iLat));
    double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1-a));
    double dist = earthRadius * c;
    return dist;
}

/**
 * A subclass of AsyncTask that calls getFromLocation() in the background.
 * The class definition has these generic types: Location - A Location
 * object containing the current location.
Void - indicates that progress * units are not used String - An address passed to onPostExecute() */ private class GetAddressTask extends AsyncTask<Location, Void, String> { int eventCode; Location mLocation; public GetAddressTask(int eventCode) { super(); this.eventCode=eventCode; } /** * Get a Geocoder instance, get the latitude and longitude look up the * address, and return it * * @params params One or more Location objects * @return A string containing the address of the current location, or * an empty string if no address can be found, or an error * message */ @Override protected String doInBackground(Location... params) { Geocoder geocoder = new Geocoder(getApplicationContext(), Locale.getDefault()); // Get the current location from the input parameter list Location loc = params[0]; mLocation=loc; // Create a list to contain the result address List<Address> addresses = null; try { /* * Return 1 address. */ addresses = geocoder.getFromLocation(loc.getLatitude(), loc.getLongitude(), 1); } catch (IOException e1) { Log.e("LocationSampleActivity", "IO Exception in getFromLocation()"); e1.printStackTrace(); return ("IO Exception trying to get address"); } catch (IllegalArgumentException e2) { // Error message to post in the log String errorString = "Illegal arguments " + Double.toString(loc.getLatitude()) + " , " + Double.toString(loc.getLongitude()) + " passed to address service"; Log.e("LocationSampleActivity", errorString); e2.printStackTrace(); return errorString; } // If the reverse geocode returned an address if (addresses != null && addresses.size() > 0) { // Get the first address Address address = addresses.get(0); /* * Format the first line of address (if available), city, and * country name. */ String addressText = String.format( "%s, %s, %s", // If there's a street address, add it address.getMaxAddressLineIndex() > 0 ? 
address .getAddressLine(0) : "", // Locality is usually a city address.getLocality(), // The country of the address address.getCountryName()); // Return the text return addressText; } else { return "No address found"; } } /** * A method that's called once doInBackground() completes. Turn * off the indeterminate activity indicator and set * the text of the UI element that shows the address. If the * lookup failed, display the error message. */ @Override protected void onPostExecute(String address) { // Display the results of the lookup. onLocationRetrieved(eventCode, mLocation, address); } } /** Callback when a message is sent from some service */ private final BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() { @SuppressLint("UseSparseArrays") @Override public void onReceive(Context context, Intent intent) { String action=intent.getAction(); Log.e(LOG_TAG, action); if(action.equals("BT_Alert_Box")) { selectInitBtDevice(); } if(action.equals(Constants.BLUETOOTH_CONNECTION_UPDATE)) { int eventCode = intent.getIntExtra(Constants.BLUETOOTH_CON_UPDATE_EVENT_CODE, Constants.OUTCOME_NONE); System.out.println(eventCode); mLocationRequest = new LocationRequest(); mLocationRequest.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY).setNumUpdates(1); LocationServices.FusedLocationApi.requestLocationUpdates( mGoogleApiClient, mLocationRequest, new BluetoothLocationClientListener(eventCode)); }else{ //TODO return from Google activity update if(action.equals(Constants.GOOGLE_ACTIVITY_RECOGNITION_UPDATE)){ String mostLikelyActivity=intent.getStringExtra(Constants.GOOGLE_ACT_UPDATE_MOST_LIKELY_ACTIVITY_TYPE); float mostLikelyActivityConfidence=intent.getFloatExtra(Constants.GOOGLE_ACT_UPDATE_MOST_LIKELY_ACTIVITY_CONFIDENCE, 0); float onFootConfidence=intent.getFloatExtra(Constants.GOOGLE_ACT_UPDATE_ON_FOOT_ACTIVITY_CONFIDENCE, 0); float inVehicleConfidence=intent.getFloatExtra(Constants.GOOGLE_ACT_UPDATE_IN_VEHICLE_ACTIVITY_CONFIDENCE, 0); int 
mostLikelyActivityType=intent.getIntExtra(Constants.GOOGLE_ACT_UPDATE_MOST_LIKELY_ACTIVITY_TYPE_INT, DetectedActivity.UNKNOWN); if(mostLikelyActivityType==DetectedActivity.UNKNOWN){ if(inVehicleConfidence>100-inVehicleConfidence-mostLikelyActivityConfidence) mostLikelyActivityType=DetectedActivity.IN_VEHICLE; else{ if(onFootConfidence>100-onFootConfidence-mostLikelyActivityConfidence) mostLikelyActivityType=DetectedActivity.ON_FOOT; } } currentTransportationMode = mostLikelyActivityType; if (currentTransportationMode == DetectedActivity.IN_VEHICLE) { if (pendingBTDetection != null && pendingBTDetection.eventCode() == Constants.OUTCOME_UNPARKING) { Toast.makeText(getApplicationContext(), "mode=invehicle, bt detection confirmed", Toast.LENGTH_LONG).show(); actionsOnBTDetection(pendingBTDetection.eventCode(), pendingBTDetection.location(), null); pendingBTDetection = null; } } MotionState.Type activityType=MotionState.translate(mostLikelyActivityType); mPastGoogleActivities.add(activityType); if(activityType==MotionState.Type.IN_VEHICLE ||activityType==MotionState.Type.ON_FOOT){ int outcome; CachedDetection oldestNotExpiredCachedDetection=null; if(activityType==MotionState.Type.IN_VEHICLE){ outcome=Constants.OUTCOME_UNPARKING; oldestNotExpiredCachedDetection=mCachedUnparkingDetectionList.get(0); }else{ outcome=Constants.OUTCOME_PARKING; oldestNotExpiredCachedDetection=mCachedParkingDetectionList.get(0); } if(mPastGoogleActivities.isTransitionTo(activityType) &&oldestNotExpiredCachedDetection!=null){ onDetectionConfirmed(outcome, oldestNotExpiredCachedDetection.location, oldestNotExpiredCachedDetection.address); } } //update the textview googleStateTextView.setText(GOOGLE_MOBILITY_STATE_PREFIX+mostLikelyActivity+" conf:"+mostLikelyActivityConfidence + " f:"+onFootConfidence+",v:"+inVehicleConfidence); //build the new MST vector double[] probsOfNewUpdate=null; if(probOfOnFootAndInVehicleOfLastUpdate!=null){ probsOfNewUpdate=new double[]{onFootConfidence/100, 
inVehicleConfidence/100}; ArrayList<Double> features=new ArrayList<Double>(); features.add(probOfOnFootAndInVehicleOfLastUpdate[0]); features.add(probOfOnFootAndInVehicleOfLastUpdate[1]); features.add(probsOfNewUpdate[0]); features.add(probsOfNewUpdate[0]); HashMap<Integer, ArrayList<Double>> mstVector=new HashMap<Integer, ArrayList<Double>>(); mstVector.put(Constants.INDICATOR_MST, features); // Log.d(LOG_TAG, "Google MST Vector: "+features.toString()); } probOfOnFootAndInVehicleOfLastUpdate=probsOfNewUpdate; } } } }; //accelerometer feature window and its neighboring windows private ArrayList<AccelerometerFeature> civVectorsWithinScope=new ArrayList<AccelerometerFeature>(); //TODO mSensnorEvent @SuppressLint("UseSparseArrays") public static long acceleometerSeq=0; private final SensorEventListener mSensorEventListener = new SensorEventListener() { @SuppressLint("UseSparseArrays") public void onSensorChanged(SensorEvent event) { // check if the accelerometer readings have changed since last sample boolean readingChanged=false; for(int i=0;i<event.values.length;i++){ if(event.values[i]!=lastAccReading[i]){ readingChanged=true; lastAccReading[i]=event.values[i]; } } if(!readingChanged) return; acceleometerSeq=(acceleometerSeq+1)%Integer.MAX_VALUE; // requires a wake lock mWakeLockManager.lock(LOCK_TAG); /** * Get the parameter values from the preference */ SharedPreferences mPrefs=getSharedPreferences(Constants.SHARED_PREFERENCES, 0); boolean classifierForCIVOn=mPrefs.getBoolean(Constants.PREFERENCE_KEY_CIV_CLASSIFIER_ON, false); boolean logOn=mPrefs.getBoolean(Constants.LOGGING_ON, false); boolean isOutdoor=mPrefs.getBoolean(Constants.PREFERENCE_KEY_IS_OUTDOOR, false); // log the raw readings String record=CommonUtils.buildALogRecordForNewAccelerometerReadings(event); if(record!=null) phoneNotStill=true; else phoneNotStill=false; boolean logRawOn=mPrefs.getBoolean(Constants.LOGGING_ACCL_RAW_SWITCH, false); if(logOn&&logRawOn){ mLogManager.log(record, 
                    Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_ACCEL_RAW]);
        }
        int outcome=Constants.OUTCOME_NONE;
        //conditions for early exit based on environment:
        // skip processing when confidently indoor, unless the user forced "outdoor".
        if( (lastEnvironment==Constants.ENVIRON_INDOOR&&probabilityOfLastEnvironment>0.8)
                //|| !pastMotionStates.contains((Integer)Constants.STATE_DRIVING)
                ){
            if(!isOutdoor)//not set to outdoor environment
                return;
        }
        /*boolean localDebug=true;//TODO for debug only
        if(localDebug) return; */
        //boolean useGoogleActivityInFusion=mPrefs.getBoolean(Constants.PREFERENCE_KEY_USE_GOOGLE_ACTIVITY_IN_FUSION, false);
        //MST Classifier And Fusion
        AccelerometerFeature motionStateFeatures=mClassificationManager.mMSTFeatureExtraction.extractWindowFeature(event);
        if(motionStateFeatures!=null){
            String motionStateInstance=motionStateFeatures.asStringForMotationState();
            WekaClassifier motionStateClassifier=mClassificationManager.mClassfiers.get(Constants.ACCEL_MOTION_STATE);
            double[] distr=motionStateClassifier.classify(motionStateInstance);
            Log.e(LOG_TAG, "motion state classifier output is : " + Arrays.toString(distr));
            /**
             * Get the motion state with largest probability
             */
            int predClassIdx=CommonUtils.idxOfMax(distr);
            if(predClassIdx!=-1){
                String predClass=Constants.CLASSIFIER_CLASS[1][predClassIdx];
                // Raw-reading dedup said the phone hasn't moved: override the classifier.
                if(!phoneNotStill)
                    predClass="Still";
                Log.e(LOG_TAG, "cur motion state="+predClass);
                stateTextView.setText(STATE_PREFIX+predClass);
                mPastClassifiedMotionStates.add(MotionState.translate(predClass));
            }
            //early exit based on state: fusion only makes sense once both an
            //on-foot and an in-vehicle state have been observed recently.
            if(//!mPastGoogleActivities.containsAtLeastOneWalkingAndOneParking()
                    !mPastClassifiedMotionStates.containsAtLeastMOnFootAndAtLeastNInVehicleStates(1,1)
                    )
                return;
            if(lastClassifiedMotionStateDistr!=null){
                //build the vector of the MST indicator:
                //[prevOnFoot, prevInVehicle, curOnFoot, curInVehicle]
                ArrayList<Double> mstVector=new ArrayList<Double>();
                mstVector.add(lastClassifiedMotionStateDistr[0] );
                mstVector.add(lastClassifiedMotionStateDistr[1]);
                mstVector.add(distr[0]);
                mstVector.add(distr[1]);
                Log.e(LOG_TAG, acceleometerSeq+" new mst vector is :"+mstVector.toString());
                HashMap<Integer, ArrayList<Double>> newPeriodicalVector=new HashMap<Integer, ArrayList<Double>>();
                newPeriodicalVector.put(Constants.INDICATOR_MST, mstVector);
                outcome=mFusionManager.fuse(lastVectors, newPeriodicalVector, System.currentTimeMillis(),
                        Constants.HIGH_LEVEL_ACTIVITY_UPARKING, mLogManager);
            }
            lastClassifiedMotionStateDistr=distr;
            //lastMotionStateDistr=new double[distr.length];
            //for(int ii=0;ii<distr.length;ii++) lastMotionStateDistr[ii]=distr[ii];
        }else{
            // No MST feature window this sample; still require the state history gate.
            if(//!mPastGoogleActivities.containsAtLeastOneWalkingAndOneParking()
                    !mPastClassifiedMotionStates.containsAtLeastMOnFootAndAtLeastNInVehicleStates(1,1)
                    )
                return;
        }
        AccelerometerFeature civFeatures=mClassificationManager.mCIVFeatureExtraction.extractWindowFeature(event);
        if(civFeatures!=null){
            //get the vector of the Change-In-Variance features
            String civVector=mClassificationManager.mCIVFeatureExtraction.extractCIVVector(civFeatures, civVectorsWithinScope);
            if( civVector!=null){
                Log.e(LOG_TAG, acceleometerSeq+" new civ vector is : "+civVector);
                boolean logAcclFeaturesOn=mPrefs.getBoolean(Constants.LOGGING_ACCL_FEATURES_SWITCH, false);
                if(logOn&&logAcclFeaturesOn){
                    // log the Change-In-Variance Classifier predicated result
                    mLogManager.log(civVector, Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_ACCEL_FEATURE]);
                }
                /**
                 * calculate the probability of the outcome
                 */
                if(!classifierForCIVOn){
                    // Fusion path: feed the raw CIV vector (first column dropped) to the fusion manager.
                    HashMap<Integer, ArrayList<Double>> newPeriodicalVector=new HashMap<Integer, ArrayList<Double>>();
                    newPeriodicalVector.put(Constants.INDICATOR_CIV,CommonUtils.stringToDoubleListRemoved(civVector, ",", new int[]{0}) );
                    outcome=mFusionManager.fuse(lastVectors, newPeriodicalVector, System.currentTimeMillis(),Constants.HIGH_LEVEL_ACTIVITY_UPARKING, mLogManager);
                }
                /**
                 * classify the vector of the Change-In-Variance vectors
                 */
                else{
                    WekaClassifier changeInVarianceClassifier=mClassificationManager.mClassfiers.get(Constants.ACCEL_CHANGE_IN_VAR);
                    double[] distr=changeInVarianceClassifier.classify(civVector);
                    int predClassInt=CommonUtils.idxOfMax(distr);
                    String predClass=",n";
                    switch(predClassInt){
                        case Constants.CIV_SIGNI_INCREASE:
                        case Constants.CIV_SIGNI_DECREASE:
                            //log the feature
                            // variance increase => parking, decrease => unparking
                            if(predClassInt==Constants.CIV_SIGNI_INCREASE){
                                predClass=",p";
                                outcome=Constants.OUTCOME_PARKING;
                            }
                            else{
                                predClass=",u";
                                outcome=Constants.OUTCOME_UNPARKING;
                            }
                            break;
                        case Constants.STATE_STILL:
                            // log the feature
                            predClass=",t";
                            //release the lock
                            mWakeLockManager.unlock(LOCK_TAG);
                            outcome=Constants.OUTCOME_NONE;
                            break;
                        default:
                            outcome=Constants.OUTCOME_NONE;
                            break;
                    }
                    System.out.println(predClass);
                }
            }
        }
        boolean logDetectionOn=mPrefs.getBoolean(Constants.LOGGING_DETECTION_SWITCH, false);
        switch(outcome){
            case Constants.OUTCOME_PARKING:
            case Constants.OUTCOME_UNPARKING:
                // A detection fired: fetch one high-accuracy fix and let
                // FusionLocationClientListener complete the report.
                mLocationRequest = new LocationRequest();
                mLocationRequest.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY).setNumUpdates(1);
                LocationServices.FusedLocationApi.requestLocationUpdates(
                        mGoogleApiClient, mLocationRequest,new FusionLocationClientListener(outcome));
                /* mLocationClient.requestLocationUpdates(
                        LocationRequest.create()
                        .setNumUpdates(1)
                        .setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY),
                        new FusionLocationClientListener(outcome));*/
                //}
                break;
            case Constants.OUTCOME_NONE:
                if(logOn){
                    if(logDetectionOn){
                        mLogManager.log("outcome="+outcome+"\n"+mFusionManager.fusionProcessLog.toString()+"\n",
                                Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_DETECTION_REPORT]);
                    }
                }
            default:
                break;
        }
    }

    public void onAccuracyChanged(Sensor sensor, int accuracy) {
    }
};

/**
 * A single callback class that will be used to handle
 * all location notifications sent by WPS.
 */
private class XPSLocationCallback implements WPSLocationCallback {
    private int eventCode;

    public XPSLocationCallback(int eventCode) {
        this.eventCode=eventCode;
    }

    public void done(){
    }

    public WPSContinuation handleError(final WPSReturnCode error) {
        // To retry the location call on error use WPS_CONTINUE,
        // otherwise return WPS_STOP
        Log.e(LOG_TAG, "WPS API return error "+error.toString());
        //return WPSContinuation.WPS_CONTINUE;
        return WPSContinuation.WPS_STOP;
    }

    @Override
    public void handleWPSLocation(WPSLocation location) {
        //actionsOnParkingLocation(eventCode, (Location) location);
    }
}

/*
 * Set main UI layout, get a handle to the ListView for logs, and create the broadcast
 * receiver.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Guard against re-entrant onCreate (e.g. configuration changes); only initialize once.
    if (onCreateCalled) {
        return;
    } else {
        onCreateCalled = true;
    }
    mContext = this;
    // Register the compass-azimuth listener used by adjCameraMap.
    mSensorManageForMap = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
    accelerometer = mSensorManageForMap.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
    magnetometer = mSensorManageForMap.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
    mSensorManageForMap.registerListener(mSensorListnerForMap, accelerometer, SensorManager.SENSOR_DELAY_NORMAL);
    mSensorManageForMap.registerListener(mSensorListnerForMap, magnetometer, SensorManager.SENSOR_DELAY_NORMAL);
    /***************************************************/
    /**
     * Set the views
     */
    // Set the main layout
    setContentView(R.layout.activity_main);
    text_parking_info = (TextView) findViewById(R.id.textview_park);
    text_navigation = (TextView) findViewById(R.id.textview1);
    // set up the map view
    MapFragment mapFragment = (MapFragment) getFragmentManager()
            .findFragmentById(R.id.map);
    mapFragment.getMapAsync(this);
    //Wait till internet connection is established
    userID = Settings.Secure.getString(getApplicationContext().getContentResolver(), Settings.Secure.ANDROID_ID);
    if (mGoogleApiClient == null) {
        mGoogleApiClient = new GoogleApiClient.Builder(this)
0 : 1; String time1 = CommonUtils.formatTimestamp(new Date(loc1.getTime()), "ddMMyyyyhhmmss"); SendParkReport sendAsync = new SendParkReport(loc1, time1, act); sendAsync.execute(); } catch (Exception ex) { ex.printStackTrace(); } } }); // Set the broadcast receiver intent filer mBroadcastManager = LocalBroadcastManager.getInstance(this); // Create a new Intent filter for the broadcast receiver mBroadcastFilter = new IntentFilter(Constants.ACTION_REFRESH_STATUS_LIST); mBroadcastFilter.addCategory(Constants.CATEGORY_LOCATION_SERVICES); mBroadcastFilter.addAction(Constants.BLUETOOTH_CONNECTION_UPDATE); mBroadcastFilter.addAction(Constants.GOOGLE_ACTIVITY_RECOGNITION_UPDATE); mBroadcastFilter.addAction("BT_Alert_Box"); mBroadcastManager.registerReceiver(mBroadcastReceiver, mBroadcastFilter); mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); // Get the LogManager object mLogManager = LogManager.getInstance(this); mLocationManager = (LocationManager) getSystemService( Context.LOCATION_SERVICE ); /** * Start Google Activity Recognition */ mGoogleActivityDetectionRequester = new GoogleActivityRecognitionClientRequester(this); mGoogleActivityDetectionRemover = new GoogleActivityRecognitionClientRemover(this); startGoogleActivityRecognitionUpdates(null); mSpeech = new TextToSpeech(this, new TextToSpeech.OnInitListener() { @Override public void onInit(int status) { // TODO Auto-generated method stub int result = mSpeech.setLanguage(Locale.US); System.out.println("result = " + result); //mSpeech.speak("Vehicle deparked at 12:30", TextToSpeech.QUEUE_FLUSH, null); //check for successful instantiation if (status == TextToSpeech.SUCCESS) { if(mSpeech.isLanguageAvailable(Locale.US)==TextToSpeech.LANG_COUNTRY_AVAILABLE){ mSpeech.setLanguage(Locale.US); //Toast.makeText(this, "Set as en_US " + Locale.getDefault().getDisplayName(), Toast.LENGTH_LONG).show(); } } else if (status == TextToSpeech.ERROR) { //Toast.makeText(this, "Sorry! 
Text To Speech failed...", Toast.LENGTH_LONG).show(); } } }); checkGPSEnabled(); //TODO test record sample //mAudioRecordManager.recordAudioSample("/sdcard/audio.wav"); //Test extract features from audio files //String features=AudioFeatureExtraction.extractFeatures(this, "/sdcard/bus6.wav"); //mClassificationManager.mClassfiers.get(Constants.SENSOR_MICROPHONE).classify(features); } public static void showNearestAvailabilityMap(List<ParkingBlock> nearestParkingBlocks) { for (int i=0; i<nearestParkingBlocks.size(); i++) { ParkingBlock nearest_parkingBlock = nearestParkingBlocks.get(i); PolylineOptions line = new PolylineOptions().add(nearest_parkingBlock.startLocation, nearest_parkingBlock.endLocation) .width(20).color(nearest_parkingBlock.getColorByAvailability()); Polyline polyline = mMap.addPolyline(line); nearest_parkingBlock.display = polyline; } } public static void showParkableMap(List<LatLng> pblocks) { //Take the first 5 blocks and display if(currentPolyline!=null){ currentPolyline.remove(); } if(currentMarkers!=null){ for (int k =0;k<currentMarkers.length;k++){ currentMarkers[k].remove(); } } currentMarkers = new Marker[pblocks.size()-1]; // Bounding box for UIC Area Polygon bbx = mMap.addPolygon(new PolygonOptions() .add(new LatLng(41.884, -87.6245), new LatLng(41.8840, -87.6636), new LatLng(41.8677, -87.6641), new LatLng(41.8658, -87.6639), new LatLng(41.8633, -87.6614), new LatLng(41.8631, -87.6254)) .strokeColor(Color.RED)); for (int i=0; i<pblocks.size()-1; i++) { float rotationDegrees = (float) GetBearing(pblocks.get(i), pblocks.get(i+1)); // round it to a multiple of 3 and cast out 120s float adjBearing = Math.round(rotationDegrees / 3) * 3; while (adjBearing >= 120) { adjBearing -= 120; } float anchorX = 0.5f; float anchorY = 0.5f; Matrix matrix = new Matrix(); matrix.setRotate(adjBearing); Bitmap arrow_head = BitmapFactory.decodeResource(MainActivity.getContext().getResources(), R.drawable.dir_0); Bitmap arrowheadBitmap = 
Bitmap.createBitmap(arrow_head, 0, 0, arrow_head.getWidth(), arrow_head.getHeight(), matrix, true); currentMarkers[i] = mMap.addMarker(new MarkerOptions() .position(pblocks.get(i)) .anchor(anchorX, anchorY) .flat(true) // Cease Rotation .title(""+i) .icon(BitmapDescriptorFactory.fromBitmap(arrowheadBitmap))); } currentPolyline = mMap.addPolyline(new PolylineOptions() .addAll(pblocks) .width(5) .zIndex(100) .color(Color.BLACK)); /* CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(location.getLatitude(), location.getLongitude()) ) .zoom(17) .bearing(location.getBearing()) .tilt(30) .build();*/ } static double degreesPerRadian = 180.0 / Math.PI; private static double GetBearing(LatLng from, LatLng to){ double lat1 = from.latitude * Math.PI / 180.0; double lon1 = from.longitude * Math.PI / 180.0; double lat2 = to.latitude * Math.PI / 180.0; double lon2 = to.longitude * Math.PI / 180.0; // Compute the angle. double angle = - Math.atan2( Math.sin( lon1 - lon2 ) * Math.cos( lat2 ), Math.cos( lat1 ) * Math.sin( lat2 ) - Math.sin( lat1 ) * Math.cos( lat2 ) * Math.cos( lon1 - lon2 ) ); if (angle < 0.0) angle += Math.PI * 2.0; // And convert result to degrees. angle = angle * degreesPerRadian; return angle; } /** * TODO * This class is to handle the Aggregated detection */ private class AggregatedIODetector extends AsyncTask<String, Void, String> { private DetectionProfile lightProfile[]; private DetectionProfile cellProfile[]; private DetectionProfile magnetProfile[]; private double[] normalizedProbablities; private double[] featureValues; @SuppressLint({ "UseSparseArrays", "SimpleDateFormat" }) @Override protected String doInBackground(String... 
param) { cellTowerChart.updateProfile();//get the cell info at time = 0 for(int i=0;i<10;i++){//get the value for the magnet at the interval of 1s for 10s try { magnetChart.updateProfile(); Thread.sleep(1000); } catch (Exception e) { } } //time = 10s lightProfile = lightChart.getProfile();//get the result from the light sensor magnetProfile = magnetChart.getProfile();//get the result from the magnet cellProfile = cellTowerChart.getProfile();//get the result from the cell tower /** * Weighted Average to combine different indicators */ /*normalizedProbablities=new double[3];//indoor, semi, outdoor Log.i("profile", "light indoor " + lightProfile[0].getConfidence() + " semi " + lightProfile[1].getConfidence() + " outdoor " + lightProfile[2].getConfidence()); Log.i("profile","magnet indoor " + magnetProfile[0].getConfidence() + " semi " + magnetProfile[1].getConfidence() + " outdoor " + magnetProfile[2].getConfidence()); Log.i("profile","cell indoor " + cellProfile[0].getConfidence() + " semi " + cellProfile[1].getConfidence() + " outdoor " + cellProfile[2].getConfidence()); for(int i=0;i<normalizedProbablities.length;i++){ //Aggregate the result normalizedProbablities[i] = lightProfile[i].getConfidence()*Constants.IODETECTOR_WEIGHT_LIGHT + magnetProfile[i].getConfidence()*Constants.IODETECTOR_WEIGHT_MAGNET + cellProfile[i].getConfidence()*Constants.IODETECTOR_WEIGHT_CELLULAR; } double sum=0; for(int i=0;i<normalizedProbablities.length;i++) sum+=normalizedProbablities[i]; for(int i=0;i<normalizedProbablities.length;i++) normalizedProbablities[i]/=sum;*/ /** * Bayesian Data Fusion */ int[] outcomes={Constants.ENVIRON_INDOOR, Constants.ENVIRON_OUTDOOR}; HashMap<Integer, ArrayList<Double>> vectorsToBeFused=new HashMap<Integer, ArrayList<Double>>(); ArrayList<Double> lightVector=new ArrayList<Double>(); ArrayList<Double> RSSVector=new ArrayList<Double>(); ArrayList<Double> magneticVector=new ArrayList<Double>(); Calendar calendar = Calendar.getInstance(); featureValues=new 
        double[3];
        // Light is only a usable indicator when the sensor isn't blocked (>0);
        // day/night gets separate indicator ids because expected lux differ.
        if(lightChart.getLigthValue()>0){//not blocked
            int hour = calendar.get(Calendar.HOUR_OF_DAY);
            if(hour>=8 && hour<=17)
                vectorsToBeFused.put(Constants.INDICATOR_LIGHT_DAY, lightVector);
            else
                vectorsToBeFused.put(Constants.INDICATOR_LIGHT_NIGHT, lightVector);
            lightVector.add((double)lightChart.getLigthValue());
            featureValues[0]=lightVector.get(0);
        }
        vectorsToBeFused.put(Constants.INDICATOR_RSS, RSSVector);
        RSSVector.add(cellTowerChart.currentASU);
        featureValues[1]=RSSVector.get(0);
        vectorsToBeFused.put(Constants.INDICATOR_MAGNETIC, magneticVector);
        magneticVector.add(magnetChart.magnetVariation);
        featureValues[2]=magneticVector.get(0);
        normalizedProbablities=mFusionManager.BayesianFusion(outcomes, vectorsToBeFused,Constants.HIGH_LEVEL_ACTIVITY_IODOOR, mLogManager);
        Log.d(LOG_TAG, "Baysian fusion Environment: "+Arrays.toString(normalizedProbablities));
        //For logging purposes only
        SharedPreferences sp=getSharedPreferences(Constants.SHARED_PREFERENCES, 0);
        boolean logEnvironOn=sp.getBoolean(Constants.LOGGING_ENVIRON_SWITCH, false);
        boolean logOn=sp.getBoolean(Constants.LOGGING_ON, false);
        if(logOn&&logEnvironOn){
            mLogManager.log(
                    new SimpleDateFormat("HH:mm:ss").format(new Date(System.currentTimeMillis()))+","+
                    lightChart.getLigthValue()+","+magnetChart.magnetVariation+","+cellTowerChart.currentASU
                    , Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_ENVIRONMENT]);
        }
        return null;
    }

    //After calculation has been done, post the result to the user:
    //pick the more probable environment, record it in the bounded history,
    //and refresh the status TextView.
    @Override
    protected void onPostExecute(String result2) {
        if(normalizedProbablities[0] > normalizedProbablities[1]
                // && normalizedProbablities[0] >= normalizedProbablities[1]
                ){//Indoor
            lastEnvironment =Constants.ENVIRON_INDOOR;//updating the condition for the comparison graph
            probabilityOfLastEnvironment=normalizedProbablities[0];
            //notifyUser(view ,"You are in indoor",R.drawable.indoor_icon, 1);//triggering the notification
            cellTowerChart.setPrevStatus(0);//set the status for the cell tower, to be used for checking previous status when unchanged.
        }else{
            /*if (normalizedProbablities[1] >normalizedProbablities[0]
                    && normalizedProbablities[1] > normalizedProbablities[2]){//Semi outdoor
                lastEnvironment =Constants.ENVIRON_SEMI_OUTDOOR;
                probabilityOfLastEnvironment=normalizedProbablities[1];
                cellTowerChart.setPrevStatus(1);
            }else{//Outdoor */
            lastEnvironment = Constants.ENVIRON_OUTDOOR;
            probabilityOfLastEnvironment=normalizedProbablities[1];
            cellTowerChart.setPrevStatus(2);
            //}
        }
        // Bounded FIFO of recent environment classifications.
        if(pastEnvironments.size()==Constants.NO_OF_PAST_STATES_STORED){
            pastEnvironments.remove(0);
        }
        pastEnvironments.add(lastEnvironment);
        String environText=ENVIRONMENT_PREFIX+CommonUtils.eventCodeToString(lastEnvironment);
        if(Constants.IS_DEBUG){
            for(int i=0;i<normalizedProbablities.length;i++){
                environText+=" "+String.format("%.2f", normalizedProbablities[i]);
            }
        }
        environTextView.setText(environText+" "
                +"light:"+String.format("%.1f", featureValues[0])
                + ", RSS:"+String.format("%.1f", featureValues[1]));
        aggregationFinish = true;//calculation finish
    }
}

@Override
protected void onResume() {
    super.onResume();
    mGoogleApiClient.connect();
}

protected void onStart() {
    super.onStart();
    mGoogleApiClient.connect();
}

protected void onStop() {
    super.onStop();
    // Deliberately kept connected across stop; disconnect happens in onDestroy.
    // mGoogleApiClient.disconnect();
}

@Override
protected void onPause() {
    super.onPause();
}

@Override
protected void onDestroy(){
    super.onDestroy();
    mGoogleApiClient.disconnect();
    mSensorManageForMap.unregisterListener(mSensorListnerForMap);
}

/*
 * Create the menu
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    //displayParkingInfo();
    MenuInflater inflater = getMenuInflater();
    inflater.inflate(R.menu.menu, menu);
    return true;
}

/**
 * Handle Performance Tuning Click: builds the advanced-settings dialog and
 * seeds each control from the current shared-preference values.
 */
private void handleAdvancedSetting(){
    final Dialog dialog = new Dialog(this);
    dialog.setTitle(R.string.menu_item_advanced_settings);
    dialog.setContentView(R.layout.advanced_setting);
    final SharedPreferences mPrefs = getSharedPreferences(Constants.SHARED_PREFERENCES, Context.MODE_PRIVATE);
    final SharedPreferences.Editor editor=mPrefs.edit();
    final ToggleButton classifierForCIVOnButton=(ToggleButton)dialog.findViewById(R.id.civ_classifier_on);
    classifierForCIVOnButton.setChecked(mPrefs.getBoolean(Constants.PREFERENCE_KEY_CIV_CLASSIFIER_ON, false));
    final ToggleButton isOutdoorButton=(ToggleButton)dialog.findViewById(R.id.is_outdoor);
    isOutdoorButton.setChecked(mPrefs.getBoolean(Constants.PREFERENCE_KEY_IS_OUTDOOR, false));
    final EditText notificationTresholdText=(EditText)dialog.findViewById(R.id.notification_threshold);
    notificationTresholdText.setText(String.format("%.2f",
            mPrefs.getFloat(Constants.PREFERENCE_KEY_NOTIFICATION_THRESHOLD, (float)Constants.DEFAULT_DETECTION_THRESHOLD)) );
    //final EditText detectionIntervalText=(EditText)dialog.findViewById(R.id.detection_interval);
    //detectionIntervalText.setText(String.valueOf(mPrefs.getInt(Constants.PREFERENCE_KEY_DETECTION_INTERVAL, Constants.DETECTION_INTERVAL_DEFAULT_VALUE) ));
    final EditText googleActivityUpdateIntervalText=(EditText)dialog.findViewById(R.id.google_activity_update_interval);
    googleActivityUpdateIntervalText.setText(
            String.valueOf(mPrefs.getInt(Constants.PREFERENCE_KEY_GOOGLE_ACTIVITY_UPDATE_INTERVAL, Constants.GOOGLE_ACTIVITY_UPDATE_INTERVAL_DEFAULT_VALUE)) );
    //final ToggleButton useGoogleActivityInFusion=(ToggleButton)dialog.findViewById(R.id.use_google_for_motion_state_in_fusion);
    //useGoogleActivityInFusion.setChecked(mPrefs.getBoolean(Constants.PREFERENCE_KEY_USE_GOOGLE_ACTIVITY_IN_FUSION, false));
    final ToggleButton logAcclRawButton=(ToggleButton)dialog.findViewById(R.id.log_raw_switch);
    logAcclRawButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_ACCL_RAW_SWITCH, false));
    final ToggleButton logAcclFeaturesButton=(ToggleButton)dialog.findViewById(R.id.log_accl_features_switch);
    logAcclFeaturesButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_ACCL_FEATURES_SWITCH, false));
    final ToggleButton
logDetectionButton=(ToggleButton)dialog.findViewById(R.id.log_report_switch); logDetectionButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_DETECTION_SWITCH, false)); final ToggleButton logErrorButton=(ToggleButton)dialog.findViewById(R.id.log_error_switch); logErrorButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_ERROR_SWITCH, true)); //final EditText deltaForConditionalProb=(EditText)dialog.findViewById(R.id.normal_dist_delta); //deltaForConditionalProb.setText(String.valueOf(mPrefs.getFloat(Constants.CIV_DELTA_CONDITIONAL_PROBABILITY, 2)) ); final Button applyButton = (Button) dialog.findViewById(R.id.performance_apply_button); final Button cancelButton = (Button) dialog.findViewById(R.id.peformance_cancel_button); applyButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { if (classifierForCIVOnButton.isChecked()) editor.putBoolean(Constants.PREFERENCE_KEY_CIV_CLASSIFIER_ON, true); else editor.putBoolean(Constants.PREFERENCE_KEY_CIV_CLASSIFIER_ON, false); if (isOutdoorButton.isChecked()) editor.putBoolean(Constants.PREFERENCE_KEY_IS_OUTDOOR, true); else editor.putBoolean(Constants.PREFERENCE_KEY_IS_OUTDOOR, false); if (logAcclRawButton.isChecked()) editor.putBoolean(Constants.LOGGING_ACCL_RAW_SWITCH, true); else editor.putBoolean(Constants.LOGGING_ACCL_RAW_SWITCH, false); if (logAcclFeaturesButton.isChecked()) editor.putBoolean(Constants.LOGGING_ACCL_FEATURES_SWITCH, true); else editor.putBoolean(Constants.LOGGING_ACCL_FEATURES_SWITCH, false); if (logDetectionButton.isChecked()) editor.putBoolean(Constants.LOGGING_DETECTION_SWITCH, true); else editor.putBoolean(Constants.LOGGING_DETECTION_SWITCH, false); if (logErrorButton.isChecked()) editor.putBoolean(Constants.LOGGING_ERROR_SWITCH, true); else editor.putBoolean(Constants.LOGGING_ERROR_SWITCH, false); float notificationTreshold; try{ notificationTreshold=Float.parseFloat( notificationTresholdText.getText().toString()); }catch(Exception ex){ 
notificationTreshold=(float)Constants.DEFAULT_DETECTION_THRESHOLD; } editor.putFloat(Constants.PREFERENCE_KEY_NOTIFICATION_THRESHOLD, notificationTreshold); /*int detectionInterval; try{ detectionInterval=Integer.parseInt( detectionIntervalText.getText().toString()); }catch(Exception ex){ detectionInterval=Constants.DETECTION_INTERVAL_DEFAULT_VALUE; } editor.putInt(Constants.PREFERENCE_KEY_DETECTION_INTERVAL, detectionInterval);*/ /*if (useGoogleActivityInFusion.isChecked()) editor.putBoolean(Constants.PREFERENCE_KEY_USE_GOOGLE_ACTIVITY_IN_FUSION, true); else editor.putBoolean(Constants.PREFERENCE_KEY_USE_GOOGLE_ACTIVITY_IN_FUSION, false);*/ int googleActivityUpdateInterval; try{ googleActivityUpdateInterval=Integer.parseInt( googleActivityUpdateIntervalText.getText().toString()); }catch(Exception ex){ googleActivityUpdateInterval=Constants.GOOGLE_ACTIVITY_UPDATE_INTERVAL_DEFAULT_VALUE; } editor.putInt(Constants.PREFERENCE_KEY_GOOGLE_ACTIVITY_UPDATE_INTERVAL, googleActivityUpdateInterval); /*try{ Float delta=Float.parseFloat(deltaForConditionalProb.getText().toString()); editor.putFloat(Constants.CIV_DELTA_CONDITIONAL_PROBABILITY, delta); }catch(Exception ex){ Toast.makeText(getApplicationContext(), "Input must be a float number", Toast.LENGTH_SHORT).show(); }*/ editor.commit(); dialog.cancel(); } }); cancelButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { dialog.cancel(); } }); dialog.show(); } /** * Handle Setting click */ private void handleSettings() { final Dialog dialog = new Dialog(this); dialog.setTitle(R.string.menu_item_settings); dialog.setContentView(R.layout.settings); final SharedPreferences mPrefs = getSharedPreferences(Constants.SHARED_PREFERENCES, Context.MODE_PRIVATE); final SharedPreferences.Editor editor=mPrefs.edit(); final ToggleButton logOnButton=(ToggleButton)dialog.findViewById(R.id.log_on); logOnButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_ON, false)); final Button 
btDeviceSelectButton=(Button)dialog.findViewById(R.id.bt_device_button); btDeviceSelectButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { if(mBluetoothAdapter.isEnabled()){ selectBluetoothDevice(); }else{ Toast.makeText(getApplicationContext(), "Please enable your Bluetooth first.", Toast.LENGTH_SHORT).show(); } } }); final Button applyButton = (Button) dialog.findViewById(R.id.apply_button); final Button cancelButton = (Button) dialog.findViewById(R.id.cancel_button); applyButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { if (logOnButton.isChecked()) editor.putBoolean(Constants.LOGGING_ON, true); else editor.putBoolean(Constants.LOGGING_ON, false); editor.commit(); dialog.cancel(); } }); cancelButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { dialog.cancel(); } }); dialog.show(); } /* * Handle selections from the menu */ @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle item selection switch (item.getItemId()) { /* // Clear the log display and remove the log files case R.id.menu_item_clearlog: return true; // Display the update log case R.id.menu_item_showlog: // Continue by passing true to the menu handler return true;*/ case R.id.menu_item_settings: handleSettings(); return true; case R.id.menu_item_showSensors: Intent i= new Intent(MainActivity.this, Sensors.class); startActivity(i); return true; case R.id.menu_item_advanced_settings: handleAdvancedSetting(); return true; case R.id.menu_item_show_route: Location mLastLocation = LocationServices.FusedLocationApi.getLastLocation( mGoogleApiClient); new SearchParkAsyncTask(mLastLocation).execute(); return true; case R.id.menu_item_ping_server: PingServer ps = new PingServer(); ps.execute(); return true; case R.id.menu_item_pdp: int st = reportParkDepark.getVisibility(); if(st==View.INVISIBLE) { reportParkDepark.setVisibility(View.VISIBLE); 
}else{ reportParkDepark.setVisibility(View.INVISIBLE); } return true; // For any other choice, pass it to the super() case R.id.menu_item_stop_routing: // mGoogleApiClient.disconnect(); isParked = true; mMap.clear(); default: return super.onOptionsItemSelected(item); } } /** Make sure that GPS is enabled */ public void checkGPSEnabled() { if ( !mLocationManager.isProviderEnabled(LocationManager.GPS_PROVIDER) ) { Log.e(LOG_TAG, "GPS not enabled yet"); /** Ask user to enable GPS */ final AlertDialog enableGPS = new AlertDialog.Builder(this) .setTitle(Constants.APP_NAME+ " needs access to GPS. Please enable GPS.") .setPositiveButton("Press here to enable GPS", new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, final int id) { startActivityForResult(new Intent(android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS), Constants.SENSOR_GPS); } }) .setCancelable(false) .create(); /*.setNegativeButton("Skip", new DialogInterface.OnClickListener() { public void onClick(@SuppressWarnings("unused") final DialogInterface dialog, @SuppressWarnings("unused") final int id) { } })*/ enableGPS.show(); }else{ Log.e(LOG_TAG, "GPS already enabled"); //GPS already enabled checkBluetoothEnabled(); } } /** Make sure that Bluetooth is enabled */ public void checkBluetoothEnabled() { if (mBluetoothAdapter == null) { // Device does not support Bluetooth AlertDialog noBluetoothAlert = new AlertDialog.Builder(this) .setTitle("Bluetooth not supported.") .setPositiveButton("Exit", new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, final int id) { } }) .setCancelable(true).create(); noBluetoothAlert.show(); writeToConsole("This phone does not have Bluetooth capability. 
Bluetooth connection method will not work."); return; } if (!mBluetoothAdapter.isEnabled()) { Log.e(LOG_TAG, "bluetooth not enabled yet"); /** Ask user to enable Bluetooth */ AlertDialog enableBluetoothDialog = new AlertDialog.Builder(this) .setTitle("Please enable Bluetooth on your phone.") .setCancelable(false) .setPositiveButton("Enable Bluetooth", new DialogInterface.OnClickListener() { public void onClick( final DialogInterface dialog, final int id) { startActivityForResult(new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE), Constants.SENSOR_BLUETOOTH); } }) .setNegativeButton("Skip", new DialogInterface.OnClickListener() { public void onClick( final DialogInterface dialog,final int id) {} }).create(); enableBluetoothDialog.show(); } else { selectInitBtDevice(); } } /* * Handle results returned to this Activity by other Activities started with * startActivityForResult(). In particular, the method onConnectionFailed() in * DetectionRemover and DetectionRequester may call startResolutionForResult() to * start an Activity that handles Google Play services problems. The result of this * call returns here, to onActivityResult. 
*/ @Override protected void onActivityResult(int requestCode, int resultCode, Intent intent) { // Choose what to do based on the request code Log.e(LOG_TAG, requestCode+" "+requestCode); switch (requestCode) { case Constants.SENSOR_GPS: checkBluetoothEnabled(); break; case Constants.SENSOR_BLUETOOTH: if(mBluetoothAdapter.isEnabled()){//only if the user enables the bluetooth checkBluetoothEnabled(); } break; case Constants.MY_DATA_CHECK_CODE: if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) { //the user has the necessary data - create the TTS //myTTS = new TextToSpeech(this, this); } else { //no data - install it now Intent installTTSIntent = new Intent(); installTTSIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA); startActivity(installTTSIntent); } break; // If the request code matches the code sent in onConnectionFailed case Constants.CONNECTION_FAILURE_RESOLUTION_REQUEST: switch (resultCode) { // If Google Play services resolved the problem case Activity.RESULT_OK: // If the request was to start activity recognition updates if (Constants.REQUEST_TYPE.ADD == mRequestType) { // Restart the process of requesting activity recognition // updates mGoogleActivityDetectionRequester.requestUpdates(); // If the request was to remove activity recognition updates } else if (Constants.REQUEST_TYPE.REMOVE == mRequestType) { /* * Restart the removal of all activity recognition updates * for the PendingIntent. */ mGoogleActivityDetectionRemover.removeUpdates(mGoogleActivityDetectionRequester .getRequestPendingIntent()); } break; // If any other result was returned by Google Play services default: // Report that Google Play services was unable to resolve the // problem. 
Log.d(Constants.APP_NAME, getString(R.string.no_resolution)); } // If any other request code was received default: // Report that this Activity received an unknown requestCode Log.d(Constants.APP_NAME, getString(R.string.unknown_activity_request_code, requestCode)); break; } } public void selectInitBtDevice() { SharedPreferences sharedPreferences=getSharedPreferences(Constants.SHARED_PREFERENCES, Context.MODE_PRIVATE); final String targetDeviceName = sharedPreferences.getString(Constants.BLUETOOTH_CAR_DEVICE_NAME, null); if(targetDeviceName != null){ AlertDialog bt_change = new AlertDialog.Builder(this) .setTitle("Your Car Bluetooth Device selected as "+targetDeviceName) .setPositiveButton("CONFIRM",new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { // TODO Auto-generated method stub Toast.makeText(getApplicationContext(), "bluetooth service started for "+targetDeviceName, Toast.LENGTH_LONG).show(); Intent intent = new Intent(MainActivity.this, BluetoothConnectionService.class); startService(intent); } }) .setNegativeButton("CHANGE", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { // TODO Auto-generated method stub selectBluetoothDevice(); } }).create(); bt_change.show(); } else{ selectBluetoothDevice(); } } private String selectedBloothDeviceName=null; public void selectBluetoothDevice() { Set<BluetoothDevice> bluetoothDevices=mBluetoothAdapter.getBondedDevices(); final CharSequence[] listItems = new CharSequence[bluetoothDevices.size()]; int i=0; for (BluetoothDevice device : mBluetoothAdapter.getBondedDevices()) { String device_name = device.getName(); listItems[i++]=device_name; } AlertDialog select=new AlertDialog.Builder(this) .setTitle(R.string.set_bluetooth_message) .setSingleChoiceItems(listItems, -1, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { Log.e(LOG_TAG, "id="+whichButton); 
if(whichButton>=0) selectedBloothDeviceName=listItems[whichButton].toString(); } }) .setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { Log.e(LOG_TAG, selectedBloothDeviceName); Toast.makeText(getApplicationContext(), getString(R.string.bluetooth_device_selected, selectedBloothDeviceName) , Toast.LENGTH_SHORT).show(); final SharedPreferences mPrefs = getSharedPreferences(Constants.SHARED_PREFERENCES, Context.MODE_PRIVATE); SharedPreferences.Editor editor=mPrefs.edit(); editor.putString(Constants.BLUETOOTH_CAR_DEVICE_NAME, selectedBloothDeviceName); editor.commit(); Intent intent = new Intent(MainActivity.this, BluetoothConnectionService.class); startService(intent); } }) .setNegativeButton("Cancel", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { } }) .create(); select.show(); } /** Write a string to output console */ public void writeToConsole(String str) { consoleTextView.append(str); final Layout layout = consoleTextView.getLayout(); if(layout != null){ int scrollDelta = layout.getLineBottom(consoleTextView.getLineCount() - 1) - consoleTextView.getScrollY() - consoleTextView.getHeight(); if(scrollDelta > 0) consoleTextView.scrollBy(0, scrollDelta); } } private void onDetectionConfirmed(int eventCode, Location location, String address){ int resID; String prefix; float markerColor; if(eventCode==Constants.OUTCOME_PARKING){ resID=R.raw.vehicle_parked; prefix=Constants.PARKING_NOTIFICATION; markerColor=BitmapDescriptorFactory.HUE_AZURE; }else{//unparking resID=R.raw.vehicle_deparked; prefix=Constants.UNPARKING_NOTIFICATION; markerColor=BitmapDescriptorFactory.HUE_RED; } //String curTimeString=CommonUtils.formatTimestamp(new Date(),formatTemplate); String curTimeString=CommonUtils.formatTimestamp( new Date(location.getTime()), "HH:mm:ss " ); Log.e(LOG_TAG, curTimeString+" \n"+location.toString() ); /* * actions */ //1. 
send the text notification String notificationMsg=prefix+" "+curTimeString; if(address!=null) notificationMsg+=address; mEventDetectionNotificationManager.sendTextNotification(notificationMsg); //2. play the sound //mEventDetectionNotificationManager.playVoiceNotification(resID); reportGlobalNumber++; if (resID==R.raw.vehicle_parked) { mSpeech.speak(reportGlobalNumber + " Fusion detected parking at " + curTimeString, TextToSpeech.QUEUE_ADD, null); Toast.makeText(getApplicationContext(), "Fusion detected leaving parking space at " + curTimeString, Toast.LENGTH_LONG).show(); } if (resID==R.raw.vehicle_deparked) { mSpeech.speak(reportGlobalNumber + " Fusion detected leaving parking space at " + curTimeString, TextToSpeech.QUEUE_ADD, null); Toast.makeText(getApplicationContext(), "Fusion detected leaving parking space at " + curTimeString, Toast.LENGTH_LONG).show(); } //3. log the address of event String logMsg=prefix+"\nNotification generation time:"+curTimeString+"\nlocation:"+location.toString()+"\n"; if(address!=null){ logMsg+=address+"\n"; logMsg+=pastEnvironments.toString()+"\n" +mPastClassifiedMotionStates.toString()+"\n" +mPastGoogleActivities.toString()+"\n"; } boolean logDetection=getSharedPreferences(Constants.SHARED_PREFERENCES, 0).getBoolean(Constants.LOGGING_DETECTION_SWITCH, false); if(logDetection) mLogManager.log(logMsg, Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_DETECTION_REPORT]); //4. 
show on the map mMap.clear(); mEventDetectionNotificationManager.addMarkersToMap(mMap, curTimeString, prefix , location.getLatitude(), location.getLongitude(), location.getAltitude(), markerColor); //center and zoom in the map CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(location.getLatitude(), location.getLongitude()) ) // Sets the center of the map to Mountain View .zoom(17) // Sets the zoom .bearing(location.getBearing()) // Sets the orientation of the camera to east .tilt(30) // Sets the tilt of the camera to 30 degrees .build(); // Creates a CameraPosition from the builder mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); //add a marker on the map Log.e(LOG_TAG, "operations on map completed"); //5. update availability display //updateAvailabilityDisplay(eventCode, location); //add a marker on the map Log.e(LOG_TAG, "operations on map completed"); //updateAvailabilityDisplay(eventCode, location); } // actions taken when a parking/unparking event is detected and the location of the event is retrieved private void onLocationRetrieved(int eventCode, Location location, String address){ //latestLocation=getLatestLocationFromIndividualProvider(location); String logMsg= (eventCode==Constants.OUTCOME_PARKING?Constants.PARKING_NOTIFICATION:Constants.UNPARKING_NOTIFICATION)+ "\nlocatoin retrieval time:"+CommonUtils.formatTimestamp( new Date(location.getTime()), "HH:mm:ss " )+"\nlocation:"+location.toString()+"\n"; if(address!=null){ logMsg+=address+"\n"; logMsg+=pastEnvironments.toString()+"\n" +mPastClassifiedMotionStates.toString()+"\n" +mPastGoogleActivities.toString()+"\n"; } boolean logDetection=getSharedPreferences(Constants.SHARED_PREFERENCES, 0).getBoolean(Constants.LOGGING_DETECTION_SWITCH, false); if(logDetection) mLogManager.log(logMsg, Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_DETECTION_REPORT]); if(eventCode==Constants.OUTCOME_PARKING){//parking 
if(mPastGoogleActivities.isTransitionTo(MotionState.Type.ON_FOOT)){ onDetectionConfirmed(eventCode, location, address); }else{ CachedDetection cd=new CachedDetection(CachedDetection.Type.Parking, location, System.currentTimeMillis(), address); mCachedParkingDetectionList.add(cd); } }else{//unparking if(mPastGoogleActivities.isTransitionTo(MotionState.Type.IN_VEHICLE)){ onDetectionConfirmed(eventCode, location, address); }else{ CachedDetection cd=new CachedDetection(CachedDetection.Type.Unparking, location, System.currentTimeMillis(), address); mCachedUnparkingDetectionList.add(cd); } } } @Override public void onConnectionFailed(ConnectionResult arg0) { } /** * Legacy codes */ //private double calibration = 0.0; private double currentAcceleration; private double appliedAcceleration = 0; private Date lastUpdate; @SuppressWarnings("unused") private double calVelocityIncrease() { // Calculate how long this acceleration has been applied. Date timeNow = new Date(System.currentTimeMillis()); double timeDelta = timeNow.getTime()-lastUpdate.getTime(); lastUpdate.setTime(timeNow.getTime()); // Calculate the change in velocity // current acceleration since the last update. double deltaVelocity = appliedAcceleration * (timeDelta/1000); appliedAcceleration = currentAcceleration; // Add the velocity change to the current velocity. return deltaVelocity; } /** * Verify that Google Play services is available before making a request. 
* * @return true if Google Play services is available, otherwise false */ private boolean isGooglePlayServiceAvailable() { // Check that Google Play services is available int resultCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this); // If Google Play services is available if (ConnectionResult.SUCCESS == resultCode) { // In debug mode, log the status Log.d(Constants.APP_NAME, getString(R.string.play_services_available)); // Continue return true; // Google Play services was not available for some reason } else { // Display an error dialog GooglePlayServicesUtil.getErrorDialog(resultCode, this, 0).show(); return false; } } /** * Respond to "Start" button by requesting activity recognition * updates. * @param view The view that triggered this method. */ public void startGoogleActivityRecognitionUpdates(View view) { // Check for Google Play services if (!isGooglePlayServiceAvailable()) { return; } /* * Set the request type. If a connection error occurs, and Google Play services can * handle it, then onActivityResult will use the request type to retry the request */ mRequestType = Constants.REQUEST_TYPE.ADD; // Pass the update request to the requester object mGoogleActivityDetectionRequester.requestUpdates(); } /** * Respond to "Stop" button by canceling updates. * @param view The view that triggered this method. */ public void stopGoogleActivityRecognitionUpdates(View view) { // Check for Google Play services if (!isGooglePlayServiceAvailable()) { return; } /* * Set the request type. If a connection error occurs, and Google Play services can * handle it, then onActivityResult will use the request type to retry the request */ mRequestType = Constants.REQUEST_TYPE.REMOVE; // Pass the remove request to the remover object mGoogleActivityDetectionRemover.removeUpdates(mGoogleActivityDetectionRequester.getRequestPendingIntent()); /* * Cancel the PendingIntent. Even if the removal request fails, canceling the PendingIntent * will stop the updates. 
*/ PendingIntent pIntent=mGoogleActivityDetectionRequester.getRequestPendingIntent(); if(pIntent!=null) pIntent.cancel(); } /** * Display the activity detection history stored in the * log file */ /*private void updateActivityHistory() { // Try to load data from the history file try { // Load log file records into the List List<Spanned> activityDetectionHistory = mLogManager.loadLogFile(); // Clear the adapter of existing data mStatusAdapter.clear(); // Add each element of the history to the adapter for (Spanned activity : activityDetectionHistory) { mStatusAdapter.add(activity); } // If the number of loaded records is greater than the max log size if (mStatusAdapter.getCount() > Constants.MAX_LOG_SIZE) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setMessage("File is too large to be shown.") .setCancelable(true); final AlertDialog alert = builder.create(); alert.show(); // Delete the old log file if (!mLogFile.removeLogFiles()) { // Log an error if unable to delete the log file Log.e(Constants.APPTAG, getString(R.string.log_file_deletion_error)); } } // Trigger the adapter to update the display mStatusAdapter.notifyDataSetChanged(); // If an error occurs while reading the history file } catch (IOException e) { Log.e(Constants.APP_NAME, e.getMessage(), e); } }*/ public static Context getContext(){ return mContext; } /** * Broadcast receiver that receives activity update intents * It checks to see if the ListView contains items. If it * doesn't, it pulls in history. * This receiver is local only. It can't read broadcast Intents from other apps. */ BroadcastReceiver updateListReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { /* * When an Intent is received from the update listener IntentService, update * the displayed log. 
*/ //do not execute an update to avoid freezing the app //updateActivityHistory(); } }; @Override public void onInit(int status) { // TODO Auto-generated method stub } @Override public PendingResult<Status> acceptConnectionRequest(GoogleApiClient arg0, String arg1, byte[] arg2, MessageListener arg3) { // TODO Auto-generated method stub return null; } @Override public void disconnectFromEndpoint(GoogleApiClient arg0, String arg1) { // TODO Auto-generated method stub } @Override public String getLocalDeviceId(GoogleApiClient arg0) { // TODO Auto-generated method stub return null; } @Override public String getLocalEndpointId(GoogleApiClient arg0) { // TODO Auto-generated method stub return null; } @Override public PendingResult<Status> rejectConnectionRequest(GoogleApiClient arg0, String arg1) { // TODO Auto-generated method stub return null; } @Override public PendingResult<Status> sendConnectionRequest(GoogleApiClient arg0, String arg1, String arg2, byte[] arg3, ConnectionResponseCallback arg4, MessageListener arg5) { // TODO Auto-generated method stub return null; } @Override public void sendReliableMessage(GoogleApiClient arg0, String arg1, byte[] arg2) { // TODO Auto-generated method stub } @Override public void sendReliableMessage(GoogleApiClient arg0, List<String> arg1, byte[] arg2) { // TODO Auto-generated method stub } @Override public void sendUnreliableMessage(GoogleApiClient arg0, String arg1, byte[] arg2) { // TODO Auto-generated method stub } @Override public void sendUnreliableMessage(GoogleApiClient arg0, List<String> arg1, byte[] arg2) { // TODO Auto-generated method stub } @Override public PendingResult<StartAdvertisingResult> startAdvertising( GoogleApiClient arg0, String arg1, AppMetadata arg2, long arg3, ConnectionRequestListener arg4) { // TODO Auto-generated method stub return null; } @Override public PendingResult<Status> startDiscovery(GoogleApiClient arg0, String arg1, long arg2, EndpointDiscoveryListener arg3) { // TODO Auto-generated method 
stub return null; } @Override public void stopAdvertising(GoogleApiClient arg0) { // TODO Auto-generated method stub } @Override public void stopAllEndpoints(GoogleApiClient arg0) { // TODO Auto-generated method stub } @Override public void stopDiscovery(GoogleApiClient arg0, String arg1) { // TODO Auto-generated method stub } @Override public void onConnected(Bundle arg0) { // TODO Auto-generated method stub } @Override public void onConnectionSuspended(int arg0) { // TODO Auto-generated method stub } } class PastMotionStates{ public int capacity; public Source source; public HashMap<MotionState.Type, Integer> map; public ArrayList<MotionState.Type> list; public long timestampOfLastInVehicleState; public long timestampOfLastOnFootState; public static final long EXPIRATION_TIME_IN_MILLISEC=Constants.ONE_MINUTE+Constants.ONE_MINUTE/2; public PastMotionStates(Source source, int capacity) { this.source = source; this.capacity = capacity; map = new HashMap<MotionState.Type, Integer>(); list = new ArrayList<MotionState.Type>(); } public void clear(){ map.clear(); list.clear(); } public void add(MotionState.Type state) { if (list.size() == capacity) { MotionState.Type removedMotionType = list.remove(0);// remove the oldest state map.put(removedMotionType, map.get(removedMotionType) - 1); } list.add(state); if (!map.containsKey(state)) map.put(state, 0); map.put(state, map.get(state) + 1); } public void removeAll(MotionState.Type state) { while(list.remove(state)); map.remove(state); } public boolean isTransitionTo(MotionState.Type state){ if(state!=MotionState.Type.IN_VEHICLE&&state!=MotionState.Type.ON_FOOT) return false; boolean ret=containsAtLeastMOnFootAndAtLeastNInVehicleStates(1, 1)&&containsOnlyOneAndLater(state); if(ret){ if(state==MotionState.Type.IN_VEHICLE) removeAll(MotionState.Type.ON_FOOT); else removeAll(MotionState.Type.IN_VEHICLE); } return ret; } public boolean containsAtLeastMOnFootAndAtLeastNInVehicleStates(int mOnFoot, int nInVehicle) { // return 
false if the filter fails if (!map.containsKey(MotionState.Type.ON_FOOT) || !map.containsKey(MotionState.Type.IN_VEHICLE)) return false; int walkingCnt = map.get(MotionState.Type.ON_FOOT); int drivingCnt = map.get(MotionState.Type.IN_VEHICLE); // Log.e(LOG_TAG,"#Walk="+walkingCnt+" #Drive="+drivingCnt); if (walkingCnt < mOnFoot || drivingCnt < nInVehicle) return false; return true; } //Type equals to either On_foot or In_vehicle public boolean containsOnlyOneAndLater(MotionState.Type type) { if (!map.containsKey(type)||map.get(type)!=1) return false; for(int i=list.size()-1;i>=0;i--){ MotionState.Type curType=list.get(i); if(curType!=MotionState.Type.ON_FOOT&&curType!=MotionState.Type.IN_VEHICLE) continue; if(curType==type) return true; else return false; } return false; } public String toString() { String ret = list.toString() + "\n"; for (Type type : map.keySet()) ret += type.toString() + ":" + map.get(type) + " "; return ret; } } class MotionState { public enum Source { Google, Classifier; } public enum Type { ON_FOOT("On_Foot"), IN_VEHICLE("In_Vehicle"), STILL("Still"), UNKNOWN( "Unknown"), ON_BIKE("On_Bike"), OTHER("Other"); private String typeString; private Type(String type) { this.typeString = type; } public String toString() { return typeString; } } public Source source; public Type type; public int secondOfDay; public static MotionState.Type translate(String predClass) { MotionState.Type ret; if ("Walking".equals(predClass)) { ret=MotionState.Type.ON_FOOT; } else { if ("Driving".equals(predClass)) ret=MotionState.Type.IN_VEHICLE; else { if ("Still".equals(predClass)) ret=MotionState.Type.STILL; else ret=MotionState.Type.OTHER; } } return ret; } public static MotionState.Type translate(int activityTypeDefinedByGoogle) { MotionState.Type ret; switch (activityTypeDefinedByGoogle) { case DetectedActivity.ON_FOOT: ret=MotionState.Type.ON_FOOT; break; case DetectedActivity.IN_VEHICLE: ret=MotionState.Type.IN_VEHICLE; break; case DetectedActivity.STILL: 
ret=MotionState.Type.STILL; break; case DetectedActivity.ON_BICYCLE: ret=MotionState.Type.ON_BIKE; default: ret=MotionState.Type.UNKNOWN; break; } return ret; } } class CachedDetection{ public enum Type{ Parking, Unparking } public long timestamp; public Location location; public String address; public Type type; public static final long EXPIRATION_TIME=Constants.ONE_MINUTE; public CachedDetection(Type type, Location loc, long time, String address){ timestamp=time; location=loc; this.type=type; this.address=address; } } class CachedDetectionList{ CachedDetection.Type type; ArrayList<CachedDetection> list; public CachedDetectionList(CachedDetection.Type type) { this.type=type; list=new ArrayList<CachedDetection>(); } public void removeExpiredCachedDetection(){ //remove expired cached detections long curtime=System.currentTimeMillis(); int i; ArrayList<CachedDetection> newList=new ArrayList<CachedDetection>(); for(i=0;i<list.size();i++){ if(curtime-list.get(i).timestamp<=CachedDetection.EXPIRATION_TIME){ newList.add(list.get(i)); } } list=newList; } public void add(CachedDetection cd){ removeExpiredCachedDetection(); //add the new one list.add(cd); } public CachedDetection get(int index){ removeExpiredCachedDetection(); if(index<0||index>=list.size()) return null; return list.get(index); } }
app/src/main/java/com/uic/sandeep/phonepark/MainActivity.java
package com.uic.sandeep.phonepark; import android.annotation.SuppressLint; import android.app.*; import android.app.PendingIntent; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.content.*; import android.graphics.*; import android.hardware.*; import android.location.*; import android.location.LocationManager; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.os.*; import android.provider.Settings; import android.speech.tts.TextToSpeech; import android.support.multidex.MultiDex; import android.support.v4.app.FragmentActivity; import android.support.v4.content.LocalBroadcastManager; import android.text.Layout; import android.text.method.ScrollingMovementMethod; import android.util.Log; import android.view.*; import android.widget.*; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesUtil; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.PendingResult; import com.google.android.gms.common.api.Status; import com.google.android.gms.location.DetectedActivity; import com.google.android.gms.location.LocationListener; import com.google.android.gms.location.LocationRequest; import com.google.android.gms.location.LocationServices; import com.google.android.gms.maps.CameraUpdateFactory; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.MapFragment; import com.google.android.gms.maps.OnMapReadyCallback; import com.google.android.gms.maps.model.BitmapDescriptorFactory; import com.google.android.gms.maps.model.CameraPosition; import com.google.android.gms.maps.model.LatLng; import com.google.android.gms.maps.model.Marker; import com.google.android.gms.maps.model.MarkerOptions; import com.google.android.gms.maps.model.Polygon; import com.google.android.gms.maps.model.PolygonOptions; import com.google.android.gms.maps.model.Polyline; import 
com.google.android.gms.maps.model.PolylineOptions; import com.google.android.gms.nearby.connection.AppMetadata; import com.google.android.gms.nearby.connection.Connections; import com.skyhookwireless.wps.WPSContinuation; import com.skyhookwireless.wps.WPSLocation; import com.skyhookwireless.wps.WPSLocationCallback; import com.skyhookwireless.wps.WPSReturnCode; import com.uic.sandeep.phonepark.MotionState.Source; import com.uic.sandeep.phonepark.MotionState.Type; import com.uic.sandeep.phonepark.blocksmap.ParkingBlock; import com.uic.sandeep.phonepark.bluetooth.BTPendingDetection; import com.uic.sandeep.phonepark.bluetooth.BluetoothConnectionService; import com.uic.sandeep.phonepark.classification.ClassificationManager; import com.uic.sandeep.phonepark.classification.WekaClassifier; import com.uic.sandeep.phonepark.fusion.FusionManager; import com.uic.sandeep.phonepark.googleacitvityrecognition.GoogleActivityRecognitionClientRemover; import com.uic.sandeep.phonepark.googleacitvityrecognition.GoogleActivityRecognitionClientRequester; import com.uic.sandeep.phonepark.indicator.accelerometerbased.AccelerometerFeature; import com.uic.sandeep.phonepark.indicator.iodetectors.CellTowerChart; import com.uic.sandeep.phonepark.indicator.iodetectors.DetectionProfile; import com.uic.sandeep.phonepark.indicator.iodetectors.LightChart; import com.uic.sandeep.phonepark.indicator.iodetectors.MagnetChart; import com.uic.sandeep.phonepark.managers.AudioRecordManager; import com.uic.sandeep.phonepark.managers.EventDetectionNotificationManager; import com.uic.sandeep.phonepark.managers.LogManager; import com.uic.sandeep.phonepark.managers.WakeLockManager; import com.uic.sandeep.phonepark.sensorlist.Sensors; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.*; /** *Sample application that demonstrates the use of *ActivityRecognitionClient}.It registers for activity detection updates *at a rate of 20seconds,logs them to a file,and displays the detected 
*activities with their associated confidence levels. *<p> *An IntentService receives activity detection updates in the background *so that detection can continue even if the Activity is not visible. */ public class MainActivity extends FragmentActivity implements Connections, GoogleApiClient.OnConnectionFailedListener, TextToSpeech.OnInitListener, GoogleApiClient.ConnectionCallbacks, OnMapReadyCallback { private static List<LatLng> PBRoutes; public static void setPBRoutes(List<LatLng> PBRoutes) { MainActivity.PBRoutes = PBRoutes; } protected void attachBaseContext(Context base) { super.attachBaseContext(base); MultiDex.install(this); } // Google Api Client public static GoogleApiClient mGoogleApiClient; public static boolean isParked = false; /** * Stores parameters for requests to the FusedLocationProviderApi. */ protected LocationRequest mLocationRequest; private static Context mContext; protected LocationRequest mParkingLocationRequest; // Unique ID for the User public static String userID; public static final String LOG_TAG=MainActivity.class.getCanonicalName(); private static final String LOCK_TAG="ACCELEROMETER_MONITOR"; private BTPendingDetection pendingBTDetection = null; private int currentTransportationMode = DetectedActivity.UNKNOWN; private boolean onCreateCalled = false; /** * UI Widgets */ // Holds the text view public static TextView text_navigation; public static TextView text_parking_info; public static ImageButton reportParkDepark; private static TextView consoleTextView, environTextView, stateTextView, googleStateTextView; public static final String ENVIRONMENT_PREFIX="Environment : "; public static final String STATE_PREFIX="Motion State Classified : "; public static final String GOOGLE_MOBILITY_STATE_PREFIX="Motion State Google : "; public static final String INDICATOR_PREFIX="Indicator : "; public static GoogleMap mMap; public static Polyline currentPolyline = null; public static Marker[] currentMarkers = null; public static TextToSpeech 
mSpeech; static Vector<ParkingBlock> parkingBlocks = null; static Vector<ParkingBlock> nearestParkingBlocks = null; static Vector<ParkingBlock> cached_nearestParkingBlocks = null; public static List<LatLng> pb_route_list = new ArrayList<LatLng>(); /** * Holds activity recognition data, in the form of * strings that can contain markup */ //private ArrayAdapter<Spanned> mStatusAdapter; //Instance of a Bluetooth adapter private BluetoothAdapter mBluetoothAdapter; /** * Intent filter for incoming broadcasts from the * IntentService. */ IntentFilter mBroadcastFilter; // Instance of a local broadcast manager private LocalBroadcastManager mBroadcastManager; //Instance of a customized location manager private LocationManager mLocationManager; // The logger object private LogManager mLogManager; // Instance of customized notification manager private EventDetectionNotificationManager mEventDetectionNotificationManager; // The wake lock manager object private WakeLockManager mWakeLockManager; /** * Google Activity Update Fields */ private GoogleActivityRecognitionClientRequester mGoogleActivityDetectionRequester; private GoogleActivityRecognitionClientRemover mGoogleActivityDetectionRemover; private double[] probOfOnFootAndInVehicleOfLastUpdate=new double[2]; /** * MST */ private PastMotionStates mPastGoogleActivities=new PastMotionStates(Source.Google, Constants.GOOGLE_ACTIVITY_LAST_STATE_NO); private PastMotionStates mPastClassifiedMotionStates=new PastMotionStates(Source.Classifier, Constants.NO_OF_PAST_STATES_STORED); private CachedDetectionList mCachedUnparkingDetectionList=new CachedDetectionList(CachedDetection.Type.Unparking); private CachedDetectionList mCachedParkingDetectionList=new CachedDetectionList(CachedDetection.Type.Parking); private double[] lastClassifiedMotionStateDistr=null; private double[] lastAccReading; private SensorManager mSensorManager; private Sensor mAccelerometer; private AudioRecordManager mAudioRecordManager; private FusionManager 
mFusionManager; /** * Detection Interval Fields */ private long lastParkingTimestamp=-1; private long lastUnparkingTimestamp=-1; /** * IODetector fields */ private CellTowerChart cellTowerChart; private LightChart lightChart; private MagnetChart magnetChart; private Handler mIODectorHandler; private boolean aggregationFinish = true; private boolean phoneNotStill = false; private int lastEnvironment=Constants.ENVIRON_UNKNOWN; private double probabilityOfLastEnvironment; private ArrayList<Integer> pastEnvironments=new ArrayList<Integer>(); private int reportGlobalNumber = 0; /** * Indicator Fusion */ @SuppressLint("UseSparseArrays") private HashMap<Integer, ArrayList<Double>> lastVectors=new HashMap<Integer, ArrayList<Double>>(); // The classification manager object private ClassificationManager mClassificationManager; // Store the current request type (ADD or REMOVE) private Constants.REQUEST_TYPE mRequestType; private SensorManager mSensorManageForMap; private Sensor accelerometer; private Sensor magnetometer; private float[] mGravity; private float[] mGeomagnetic; private SensorEventListener mSensorListnerForMap = new SensorEventListener(){ @Override public void onSensorChanged(SensorEvent event) { if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) mGravity = event.values; if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) mGeomagnetic = event.values; if (mGravity != null && mGeomagnetic != null) { float R[] = new float[9]; float I[] = new float[9]; boolean success = SensorManager.getRotationMatrix(R, I, mGravity, mGeomagnetic); if (success) { float orientation[] = new float[3]; SensorManager.getOrientation(R, orientation); float azimut = orientation[0]; // orientation contains: azimut, pitch and roll azimuthInDegress = (float)Math.toDegrees(azimut); if (azimuthInDegress < 0.0f) { azimuthInDegress += 360.0f; } } } } @Override public void onAccuracyChanged(Sensor sensor, int accuracy) { } }; private static Location currentLoc; private static float 
azimuthInDegress =0; public static void adjCameraMap(Location loci1) { currentLoc = loci1; if(currentLoc.getSpeed()<10){ CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(currentLoc.getLatitude(), currentLoc.getLongitude()))// Sets the center of the map to Mountain View .bearing(azimuthInDegress).tilt(30).zoom(17).build(); MainActivity.mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); }else{ CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(currentLoc.getLatitude(), currentLoc.getLongitude()))// Sets the center of the map to Mountain View .bearing(currentLoc.getBearing()).tilt(30).zoom(17).build(); MainActivity.mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); } } public class BluetoothLocationClientListener implements LocationListener { int eventCode; public BluetoothLocationClientListener(int eventCode){ this.eventCode=eventCode; } @Override public void onLocationChanged(Location location) { //(new GetAddressTask(eventCode)).execute(location); BTParkingLocationReceived(eventCode, location, null); } } @Override public void onMapReady(final GoogleMap map) { this.mMap = map; mMap.setMyLocationEnabled(true); LatLng chicago = new LatLng(41.88, -87.62); mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(chicago, 13)); Polygon bbx = mMap.addPolygon(new PolygonOptions() .add(new LatLng(41.884, -87.6245), new LatLng(41.8840, -87.6636), new LatLng(41.8677,-87.6641), new LatLng(41.8658, -87.6639), new LatLng(41.8633, -87.6614), new LatLng(41.8631, -87.6254) ) .strokeColor(Color.RED)); } public static int pwed = 0; public class ParkingSearchClientListener implements LocationListener { public ParkingSearchClientListener(){ } @Override public void onLocationChanged(Location location) { mMap.clear(); Polygon bbx = mMap.addPolygon(new PolygonOptions() .add(new LatLng(41.884, -87.6245), new LatLng(41.8840, -87.6636), new LatLng(41.8677,-87.6641), new LatLng(41.8658, 
-87.6639), new LatLng(41.8633, -87.6614), new LatLng(41.8631, -87.6254) ).zIndex(10) .strokeColor(Color.RED)); CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(location.getLatitude(), location.getLongitude()) )// Sets the center of the map to Mountain View .bearing(location.getBearing()).tilt(30).zoom(17).build(); mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); DisplayNearestParkBlock displayNearestParkBlock = new DisplayNearestParkBlock(location); displayNearestParkBlock.execute(); //text_navigation.setText("Rid = " + pwed); pwed++; } } public void BTParkingLocationReceived(int eventCode, Location location, String address) { if(eventCode==Constants.OUTCOME_UNPARKING){ if (currentTransportationMode == DetectedActivity.IN_VEHICLE) { actionsOnBTDetection(eventCode, location, null); } else { pendingBTDetection = new BTPendingDetection(eventCode, location); Toast.makeText(getApplicationContext(), "btdetection pending", Toast.LENGTH_SHORT).show(); } }else{ pendingBTDetection = null; actionsOnBTDetection(eventCode, location, null); } } public class FusionLocationClientListener implements LocationListener{ int eventCode; public FusionLocationClientListener(int eventCode){ this.eventCode=eventCode; } @Override public void onLocationChanged(Location location) { //(new GetAddressTask(eventCode)).execute(location); onLocationRetrieved(eventCode, location, null); } } // actions taken when a parking/unparking event is detected and the location of the event is retrieved private void actionsOnBTDetection(int eventCode, Location location, String address){ //latestLocation=getLatestLocationFromIndividualProvider(location); int resID; String prefix; float markerColor; if(eventCode==Constants.OUTCOME_PARKING){ resID=R.raw.vehicle_parked; prefix=Constants.PARKING_NOTIFICATION; markerColor= BitmapDescriptorFactory.HUE_AZURE; }else{ resID=R.raw.vehicle_deparked; prefix=Constants.UNPARKING_NOTIFICATION; 
markerColor=BitmapDescriptorFactory.HUE_RED; } //String curTimeString=CommonUtils.formatTimestamp(new Date(),formatTemplate); String curTimeString=CommonUtils.formatTimestamp( new Date(location.getTime()), "ddMMyyyyhhmmss" ); Log.e(LOG_TAG, curTimeString + " \n" + location.toString()); /* * actions */ //1. send the text notification //String notificationMsg=prefix+" "+curTimeString; //if(address!=null) notificationMsg+=address; //mEventDetectionNotificationManager.sendTextNotification(notificationMsg); //Toast.makeText(getApplicationContext(), notificationMsg, 2).show(); //2. play the sound //mEventDetectionNotificationManager.playVoiceNotification(resID); reportGlobalNumber++; if (resID==R.raw.vehicle_parked) { String announcementOnce = reportGlobalNumber + " Bluetooth detected parking "; String announcement = announcementOnce + announcementOnce;// + announcementOnce; //mSpeech.speak(reportGlobalNumber + " Blue tooth detected parking at " + curTimeString, TextToSpeech.QUEUE_ADD, null); mSpeech.stop(); mSpeech.speak(announcement, TextToSpeech.QUEUE_ADD, null); Toast.makeText(getApplicationContext(), "Bluetooth detected parking", Toast.LENGTH_LONG).show(); SendParkReport sendPark = new SendParkReport(location, curTimeString, 1); sendPark.execute(); isParked = true; } if (resID==R.raw.vehicle_deparked) { String announcementOnce = reportGlobalNumber + " Bluetooth detected leaving parking space "; String announcement = announcementOnce + announcementOnce;// + announcementOnce; //mSpeech.speak(reportGlobalNumber + " Blue tooth detected leaving parking space at " + curTimeString, TextToSpeech.QUEUE_ADD, null); mSpeech.stop(); mSpeech.speak(announcement, TextToSpeech.QUEUE_ADD, null); Toast.makeText(getApplicationContext(), "Bluetooth detected leaving parking space", Toast.LENGTH_LONG).show(); SendParkReport sendDePark = new SendParkReport(location,curTimeString,0); sendDePark.execute(); isParked = false; } //3. 
log the address of event String logMsg=prefix+"\n Location retrieval time:"+curTimeString+"\nlocation:"+location.toString()+"\n"; if(address!=null){ //logMsg+=address+"\n"; //logMsg+=pastEnvironments.toString()+"\n"+pastMotionStates+"\n"; } mLogManager.log(logMsg, Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_DETECTION_REPORT]); //4. show on the map if(currentPolyline!=null){ currentPolyline.remove(); } if(currentMarkers!=null){ for (int k =0;k<currentMarkers.length;k++){ currentMarkers[k].remove(); } } //mEventDetectionNotificationManager.addMarkersToMap(mMap, curTimeString, prefix // , location.getLatitude(), location.getLongitude(), location.getAltitude(), markerColor); //center and zoom in the map CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(location.getLatitude(), location.getLongitude()) )// Sets the center of the map to Mountain View .bearing(location.getBearing()).tilt(30).zoom(17).build(); mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); //5. 
update availability display //updateAvailabilityDisplay(eventCode, location); //add a marker on the map Log.e(LOG_TAG, "operations on map completed"); Polygon bbx = mMap.addPolygon(new PolygonOptions() .add(new LatLng(41.884, -87.6245), new LatLng(41.8840, -87.6636), new LatLng(41.8677,-87.6641), new LatLng(41.8658, -87.6639), new LatLng(41.8633, -87.6614), new LatLng(41.8631, -87.6254) ) .strokeColor(Color.RED)); } public boolean isNetworkAvailable() { ConnectivityManager connectivityManager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo(); return activeNetworkInfo != null && activeNetworkInfo.isConnected(); } public double distFromVehicleToIntersection(double cLat,double cLon,double iLat,double iLong) { double earthRadius = 3958.75; double dLat = Math.toRadians(iLat-cLat); double dLng = Math.toRadians(iLong-cLon); double sindLat = Math.sin(dLat / 2); double sindLng = Math.sin(dLng / 2); double a = Math.pow(sindLat, 2) + Math.pow(sindLng, 2) * Math.cos(Math.toRadians(cLat)) * Math.cos(Math.toRadians(iLat)); double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1-a)); double dist = earthRadius * c; return dist; } /** * A subclass of AsyncTask that calls getFromLocation() in the background. * The class definition has these generic types: Location - A Location * object containing the current location. 
Void - indicates that progress * units are not used String - An address passed to onPostExecute() */ private class GetAddressTask extends AsyncTask<Location, Void, String> { int eventCode; Location mLocation; public GetAddressTask(int eventCode) { super(); this.eventCode=eventCode; } /** * Get a Geocoder instance, get the latitude and longitude look up the * address, and return it * * @params params One or more Location objects * @return A string containing the address of the current location, or * an empty string if no address can be found, or an error * message */ @Override protected String doInBackground(Location... params) { Geocoder geocoder = new Geocoder(getApplicationContext(), Locale.getDefault()); // Get the current location from the input parameter list Location loc = params[0]; mLocation=loc; // Create a list to contain the result address List<Address> addresses = null; try { /* * Return 1 address. */ addresses = geocoder.getFromLocation(loc.getLatitude(), loc.getLongitude(), 1); } catch (IOException e1) { Log.e("LocationSampleActivity", "IO Exception in getFromLocation()"); e1.printStackTrace(); return ("IO Exception trying to get address"); } catch (IllegalArgumentException e2) { // Error message to post in the log String errorString = "Illegal arguments " + Double.toString(loc.getLatitude()) + " , " + Double.toString(loc.getLongitude()) + " passed to address service"; Log.e("LocationSampleActivity", errorString); e2.printStackTrace(); return errorString; } // If the reverse geocode returned an address if (addresses != null && addresses.size() > 0) { // Get the first address Address address = addresses.get(0); /* * Format the first line of address (if available), city, and * country name. */ String addressText = String.format( "%s, %s, %s", // If there's a street address, add it address.getMaxAddressLineIndex() > 0 ? 
address .getAddressLine(0) : "", // Locality is usually a city address.getLocality(), // The country of the address address.getCountryName()); // Return the text return addressText; } else { return "No address found"; } } /** * A method that's called once doInBackground() completes. Turn * off the indeterminate activity indicator and set * the text of the UI element that shows the address. If the * lookup failed, display the error message. */ @Override protected void onPostExecute(String address) { // Display the results of the lookup. onLocationRetrieved(eventCode, mLocation, address); } } /** Callback when a message is sent from some service */ private final BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() { @SuppressLint("UseSparseArrays") @Override public void onReceive(Context context, Intent intent) { String action=intent.getAction(); Log.e(LOG_TAG, action); if(action.equals("BT_Alert_Box")) { selectInitBtDevice(); } if(action.equals(Constants.BLUETOOTH_CONNECTION_UPDATE)) { int eventCode = intent.getIntExtra(Constants.BLUETOOTH_CON_UPDATE_EVENT_CODE, Constants.OUTCOME_NONE); System.out.println(eventCode); mLocationRequest = new LocationRequest(); mLocationRequest.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY).setNumUpdates(1); LocationServices.FusedLocationApi.requestLocationUpdates( mGoogleApiClient, mLocationRequest, new BluetoothLocationClientListener(eventCode)); }else{ //TODO return from Google activity update if(action.equals(Constants.GOOGLE_ACTIVITY_RECOGNITION_UPDATE)){ String mostLikelyActivity=intent.getStringExtra(Constants.GOOGLE_ACT_UPDATE_MOST_LIKELY_ACTIVITY_TYPE); float mostLikelyActivityConfidence=intent.getFloatExtra(Constants.GOOGLE_ACT_UPDATE_MOST_LIKELY_ACTIVITY_CONFIDENCE, 0); float onFootConfidence=intent.getFloatExtra(Constants.GOOGLE_ACT_UPDATE_ON_FOOT_ACTIVITY_CONFIDENCE, 0); float inVehicleConfidence=intent.getFloatExtra(Constants.GOOGLE_ACT_UPDATE_IN_VEHICLE_ACTIVITY_CONFIDENCE, 0); int 
mostLikelyActivityType=intent.getIntExtra(Constants.GOOGLE_ACT_UPDATE_MOST_LIKELY_ACTIVITY_TYPE_INT, DetectedActivity.UNKNOWN); if(mostLikelyActivityType==DetectedActivity.UNKNOWN){ if(inVehicleConfidence>100-inVehicleConfidence-mostLikelyActivityConfidence) mostLikelyActivityType=DetectedActivity.IN_VEHICLE; else{ if(onFootConfidence>100-onFootConfidence-mostLikelyActivityConfidence) mostLikelyActivityType=DetectedActivity.ON_FOOT; } } currentTransportationMode = mostLikelyActivityType; if (currentTransportationMode == DetectedActivity.IN_VEHICLE) { if (pendingBTDetection != null && pendingBTDetection.eventCode() == Constants.OUTCOME_UNPARKING) { Toast.makeText(getApplicationContext(), "mode=invehicle, bt detection confirmed", Toast.LENGTH_LONG).show(); actionsOnBTDetection(pendingBTDetection.eventCode(), pendingBTDetection.location(), null); pendingBTDetection = null; } } MotionState.Type activityType=MotionState.translate(mostLikelyActivityType); mPastGoogleActivities.add(activityType); if(activityType==MotionState.Type.IN_VEHICLE ||activityType==MotionState.Type.ON_FOOT){ int outcome; CachedDetection oldestNotExpiredCachedDetection=null; if(activityType==MotionState.Type.IN_VEHICLE){ outcome=Constants.OUTCOME_UNPARKING; oldestNotExpiredCachedDetection=mCachedUnparkingDetectionList.get(0); }else{ outcome=Constants.OUTCOME_PARKING; oldestNotExpiredCachedDetection=mCachedParkingDetectionList.get(0); } if(mPastGoogleActivities.isTransitionTo(activityType) &&oldestNotExpiredCachedDetection!=null){ onDetectionConfirmed(outcome, oldestNotExpiredCachedDetection.location, oldestNotExpiredCachedDetection.address); } } //update the textview googleStateTextView.setText(GOOGLE_MOBILITY_STATE_PREFIX+mostLikelyActivity+" conf:"+mostLikelyActivityConfidence + " f:"+onFootConfidence+",v:"+inVehicleConfidence); //build the new MST vector double[] probsOfNewUpdate=null; if(probOfOnFootAndInVehicleOfLastUpdate!=null){ probsOfNewUpdate=new double[]{onFootConfidence/100, 
inVehicleConfidence/100}; ArrayList<Double> features=new ArrayList<Double>(); features.add(probOfOnFootAndInVehicleOfLastUpdate[0]); features.add(probOfOnFootAndInVehicleOfLastUpdate[1]); features.add(probsOfNewUpdate[0]); features.add(probsOfNewUpdate[0]); HashMap<Integer, ArrayList<Double>> mstVector=new HashMap<Integer, ArrayList<Double>>(); mstVector.put(Constants.INDICATOR_MST, features); // Log.d(LOG_TAG, "Google MST Vector: "+features.toString()); } probOfOnFootAndInVehicleOfLastUpdate=probsOfNewUpdate; } } } }; //accelerometer feature window and its neighboring windows private ArrayList<AccelerometerFeature> civVectorsWithinScope=new ArrayList<AccelerometerFeature>(); //TODO mSensnorEvent @SuppressLint("UseSparseArrays") public static long acceleometerSeq=0; private final SensorEventListener mSensorEventListener = new SensorEventListener() { @SuppressLint("UseSparseArrays") public void onSensorChanged(SensorEvent event) { // check if the accelerometer readings have changed since last sample boolean readingChanged=false; for(int i=0;i<event.values.length;i++){ if(event.values[i]!=lastAccReading[i]){ readingChanged=true; lastAccReading[i]=event.values[i]; } } if(!readingChanged) return; acceleometerSeq=(acceleometerSeq+1)%Integer.MAX_VALUE; // requires a wake lock mWakeLockManager.lock(LOCK_TAG); /** * Get the parameter values from the preference */ SharedPreferences mPrefs=getSharedPreferences(Constants.SHARED_PREFERENCES, 0); boolean classifierForCIVOn=mPrefs.getBoolean(Constants.PREFERENCE_KEY_CIV_CLASSIFIER_ON, false); boolean logOn=mPrefs.getBoolean(Constants.LOGGING_ON, false); boolean isOutdoor=mPrefs.getBoolean(Constants.PREFERENCE_KEY_IS_OUTDOOR, false); // log the raw readings String record=CommonUtils.buildALogRecordForNewAccelerometerReadings(event); if(record!=null) phoneNotStill=true; else phoneNotStill=false; boolean logRawOn=mPrefs.getBoolean(Constants.LOGGING_ACCL_RAW_SWITCH, false); if(logOn&&logRawOn){ mLogManager.log(record, 
Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_ACCEL_RAW]); } int outcome=Constants.OUTCOME_NONE; //conditions for early exit based on environment if( (lastEnvironment==Constants.ENVIRON_INDOOR&&probabilityOfLastEnvironment>0.8) //|| !pastMotionStates.contains((Integer)Constants.STATE_DRIVING) ){ if(!isOutdoor)//not set to outdoor environment return; } /*boolean localDebug=true;//TODO for debug only if(localDebug) return; */ //boolean useGoogleActivityInFusion=mPrefs.getBoolean(Constants.PREFERENCE_KEY_USE_GOOGLE_ACTIVITY_IN_FUSION, false); //MST Classifier And Fusion AccelerometerFeature motionStateFeatures=mClassificationManager.mMSTFeatureExtraction.extractWindowFeature(event); if(motionStateFeatures!=null){ String motionStateInstance=motionStateFeatures.asStringForMotationState(); WekaClassifier motionStateClassifier=mClassificationManager.mClassfiers.get(Constants.ACCEL_MOTION_STATE); double[] distr=motionStateClassifier.classify(motionStateInstance); Log.e(LOG_TAG, "motion state classifier output is : " + Arrays.toString(distr)); /** * Get the motion state with largest probability */ int predClassIdx=CommonUtils.idxOfMax(distr); if(predClassIdx!=-1){ String predClass=Constants.CLASSIFIER_CLASS[1][predClassIdx]; if(!phoneNotStill) predClass="Still"; Log.e(LOG_TAG, "cur motion state="+predClass); stateTextView.setText(STATE_PREFIX+predClass); mPastClassifiedMotionStates.add(MotionState.translate(predClass)); } //early exit based on state if(//!mPastGoogleActivities.containsAtLeastOneWalkingAndOneParking() !mPastClassifiedMotionStates.containsAtLeastMOnFootAndAtLeastNInVehicleStates(1,1) ) return; if(lastClassifiedMotionStateDistr!=null){ //build the vector of the MST indicator ArrayList<Double> mstVector=new ArrayList<Double>(); mstVector.add(lastClassifiedMotionStateDistr[0] ); mstVector.add(lastClassifiedMotionStateDistr[1]); mstVector.add(distr[0]); mstVector.add(distr[1]); Log.e(LOG_TAG, acceleometerSeq+" new mst vector is :"+mstVector.toString()); 
HashMap<Integer, ArrayList<Double>> newPeriodicalVector=new HashMap<Integer, ArrayList<Double>>(); newPeriodicalVector.put(Constants.INDICATOR_MST, mstVector); outcome=mFusionManager.fuse(lastVectors, newPeriodicalVector, System.currentTimeMillis(), Constants.HIGH_LEVEL_ACTIVITY_UPARKING, mLogManager); } lastClassifiedMotionStateDistr=distr; //lastMotionStateDistr=new double[distr.length]; //for(int ii=0;ii<distr.length;ii++) lastMotionStateDistr[ii]=distr[ii]; }else{ if(//!mPastGoogleActivities.containsAtLeastOneWalkingAndOneParking() !mPastClassifiedMotionStates.containsAtLeastMOnFootAndAtLeastNInVehicleStates(1,1) ) return; } AccelerometerFeature civFeatures=mClassificationManager.mCIVFeatureExtraction.extractWindowFeature(event); if(civFeatures!=null){ //get the vector of the Change-In-Variance features String civVector=mClassificationManager.mCIVFeatureExtraction.extractCIVVector(civFeatures, civVectorsWithinScope); if( civVector!=null){ Log.e(LOG_TAG, acceleometerSeq+" new civ vector is : "+civVector); boolean logAcclFeaturesOn=mPrefs.getBoolean(Constants.LOGGING_ACCL_FEATURES_SWITCH, false); if(logOn&&logAcclFeaturesOn){ // log the Change-In-Variance Classifier predicated result mLogManager.log(civVector, Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_ACCEL_FEATURE]); } /** * calculate the probability of the outcome */ if(!classifierForCIVOn){ HashMap<Integer, ArrayList<Double>> newPeriodicalVector=new HashMap<Integer, ArrayList<Double>>(); newPeriodicalVector.put(Constants.INDICATOR_CIV,CommonUtils.stringToDoubleListRemoved(civVector, ",", new int[]{0}) ); outcome=mFusionManager.fuse(lastVectors, newPeriodicalVector, System.currentTimeMillis(),Constants.HIGH_LEVEL_ACTIVITY_UPARKING, mLogManager); } /** * classify the vector of the Change-In-Variance vectors */ else{ WekaClassifier changeInVarianceClassifier=mClassificationManager.mClassfiers.get(Constants.ACCEL_CHANGE_IN_VAR); double[] distr=changeInVarianceClassifier.classify(civVector); int 
predClassInt=CommonUtils.idxOfMax(distr); String predClass=",n"; switch(predClassInt){ case Constants.CIV_SIGNI_INCREASE: case Constants.CIV_SIGNI_DECREASE: //log the feature if(predClassInt==Constants.CIV_SIGNI_INCREASE){ predClass=",p"; outcome=Constants.OUTCOME_PARKING; } else{ predClass=",u"; outcome=Constants.OUTCOME_UNPARKING; } break; case Constants.STATE_STILL: // log the feature predClass=",t"; //release the lock mWakeLockManager.unlock(LOCK_TAG); outcome=Constants.OUTCOME_NONE; break; default: outcome=Constants.OUTCOME_NONE; break; } System.out.println(predClass); } } } boolean logDetectionOn=mPrefs.getBoolean(Constants.LOGGING_DETECTION_SWITCH, false); switch(outcome){ case Constants.OUTCOME_PARKING: case Constants.OUTCOME_UNPARKING: mLocationRequest = new LocationRequest(); mLocationRequest.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY).setNumUpdates(1); LocationServices.FusedLocationApi.requestLocationUpdates( mGoogleApiClient, mLocationRequest,new FusionLocationClientListener(outcome)); /* mLocationClient.requestLocationUpdates( LocationRequest.create() .setNumUpdates(1) .setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY), new FusionLocationClientListener(outcome));*/ //} break; case Constants.OUTCOME_NONE: if(logOn){ if(logDetectionOn){ mLogManager.log("outcome="+outcome+"\n"+mFusionManager.fusionProcessLog.toString()+"\n", Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_DETECTION_REPORT]); } } default: break; } } public void onAccuracyChanged(Sensor sensor, int accuracy) { } }; /** * A single callback class that will be used to handle * all location notifications sent by WPS. 
*/ private class XPSLocationCallback implements WPSLocationCallback { private int eventCode; public XPSLocationCallback(int eventCode) { this.eventCode=eventCode; } public void done(){ } public WPSContinuation handleError(final WPSReturnCode error) { // To retry the location call on error use WPS_CONTINUE, // otherwise return WPS_STOP Log.e(LOG_TAG, "WPS API return error "+error.toString()); //return WPSContinuation.WPS_CONTINUE; return WPSContinuation.WPS_STOP; } @Override public void handleWPSLocation(WPSLocation location) { //actionsOnParkingLocation(eventCode, (Location) location); } } /* * Set main UI layout, get a handle to the ListView for logs, and create the broadcast * receiver. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); if (onCreateCalled) { return; } else { onCreateCalled = true; } mContext = this; mSensorManageForMap = (SensorManager) getSystemService(Context.SENSOR_SERVICE); accelerometer = mSensorManageForMap.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); magnetometer = mSensorManageForMap.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); mSensorManageForMap.registerListener(mSensorListnerForMap, accelerometer, SensorManager.SENSOR_DELAY_NORMAL); mSensorManageForMap.registerListener(mSensorListnerForMap, magnetometer, SensorManager.SENSOR_DELAY_NORMAL); /***************************************************/ /** * Set the views */ // Set the main layout setContentView(R.layout.activity_main); text_parking_info = (TextView) findViewById(R.id.textview_park); text_navigation = (TextView) findViewById(R.id.textview1); // set up the map view MapFragment mapFragment = (MapFragment) getFragmentManager() .findFragmentById(R.id.map); mapFragment.getMapAsync(this); //Wait till internet connection is established userID = Settings.Secure.getString(getApplicationContext().getContentResolver(), Settings.Secure.ANDROID_ID); if (mGoogleApiClient == null) { mGoogleApiClient = new GoogleApiClient.Builder(this) 
.addConnectionCallbacks(this) .addOnConnectionFailedListener(this) .addApi(LocationServices.API) .build(); } // get a handle to the console textview consoleTextView = (TextView) findViewById(R.id.console_text_id); consoleTextView.setMovementMethod(new ScrollingMovementMethod()); //setup monitoring fields environTextView=(TextView) findViewById(R.id.environment); environTextView.setText(ENVIRONMENT_PREFIX+CommonUtils.eventCodeToString(lastEnvironment)); stateTextView=(TextView) findViewById(R.id.state); stateTextView.setText(STATE_PREFIX+"unknown"); googleStateTextView=(TextView) findViewById(R.id.google_state); googleStateTextView.setText(GOOGLE_MOBILITY_STATE_PREFIX+"unknown"); //indicatorTextView=(TextView) findViewById(R.id.indicator); //indicatorTextView.setText(INDICATOR_PREFIX); /*Send Data to Server*/ ImageButton park_search = (ImageButton)findViewById(R.id.Park); park_search.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if(!mGoogleApiClient.isConnected()){ mGoogleApiClient.connect(); } isParked = false; mParkingLocationRequest = new LocationRequest(); mParkingLocationRequest.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY).setNumUpdates(1); LocationServices.FusedLocationApi.requestLocationUpdates( mGoogleApiClient, mParkingLocationRequest, new ParkingSearchClientListener()); } }); reportParkDepark = (ImageButton)findViewById(R.id.PDP); reportParkDepark.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { try { Location loc1 = LocationServices.FusedLocationApi.getLastLocation( mGoogleApiClient); int act = (Math.random() <= 0.5) ? 
0 : 1; String time1 = CommonUtils.formatTimestamp(new Date(loc1.getTime()), "ddMMyyyyhhmmss"); SendParkReport sendAsync = new SendParkReport(loc1, time1, act); sendAsync.execute(); } catch (Exception ex) { ex.printStackTrace(); } } }); // Set the broadcast receiver intent filer mBroadcastManager = LocalBroadcastManager.getInstance(this); // Create a new Intent filter for the broadcast receiver mBroadcastFilter = new IntentFilter(Constants.ACTION_REFRESH_STATUS_LIST); mBroadcastFilter.addCategory(Constants.CATEGORY_LOCATION_SERVICES); mBroadcastFilter.addAction(Constants.BLUETOOTH_CONNECTION_UPDATE); mBroadcastFilter.addAction(Constants.GOOGLE_ACTIVITY_RECOGNITION_UPDATE); mBroadcastFilter.addAction("BT_Alert_Box"); mBroadcastManager.registerReceiver(mBroadcastReceiver, mBroadcastFilter); mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); // Get the LogManager object mLogManager = LogManager.getInstance(this); mLocationManager = (LocationManager) getSystemService( Context.LOCATION_SERVICE ); /** * Start Google Activity Recognition */ mGoogleActivityDetectionRequester = new GoogleActivityRecognitionClientRequester(this); mGoogleActivityDetectionRemover = new GoogleActivityRecognitionClientRemover(this); startGoogleActivityRecognitionUpdates(null); mSpeech = new TextToSpeech(this, new TextToSpeech.OnInitListener() { @Override public void onInit(int status) { // TODO Auto-generated method stub int result = mSpeech.setLanguage(Locale.US); System.out.println("result = " + result); //mSpeech.speak("Vehicle deparked at 12:30", TextToSpeech.QUEUE_FLUSH, null); //check for successful instantiation if (status == TextToSpeech.SUCCESS) { if(mSpeech.isLanguageAvailable(Locale.US)==TextToSpeech.LANG_COUNTRY_AVAILABLE){ mSpeech.setLanguage(Locale.US); //Toast.makeText(this, "Set as en_US " + Locale.getDefault().getDisplayName(), Toast.LENGTH_LONG).show(); } } else if (status == TextToSpeech.ERROR) { //Toast.makeText(this, "Sorry! 
Text To Speech failed...", Toast.LENGTH_LONG).show(); } } }); checkGPSEnabled(); //TODO test record sample //mAudioRecordManager.recordAudioSample("/sdcard/audio.wav"); //Test extract features from audio files //String features=AudioFeatureExtraction.extractFeatures(this, "/sdcard/bus6.wav"); //mClassificationManager.mClassfiers.get(Constants.SENSOR_MICROPHONE).classify(features); } public static void showNearestAvailabilityMap(List<ParkingBlock> nearestParkingBlocks) { for (int i=0; i<nearestParkingBlocks.size(); i++) { ParkingBlock nearest_parkingBlock = nearestParkingBlocks.get(i); PolylineOptions line = new PolylineOptions().add(nearest_parkingBlock.startLocation, nearest_parkingBlock.endLocation) .width(20).color(nearest_parkingBlock.getColorByAvailability()); Polyline polyline = mMap.addPolyline(line); nearest_parkingBlock.display = polyline; } } public static void showParkableMap(List<LatLng> pblocks) { //Take the first 5 blocks and display if(currentPolyline!=null){ currentPolyline.remove(); } if(currentMarkers!=null){ for (int k =0;k<currentMarkers.length;k++){ currentMarkers[k].remove(); } } currentMarkers = new Marker[pblocks.size()-1]; // Bounding box for UIC Area Polygon bbx = mMap.addPolygon(new PolygonOptions() .add(new LatLng(41.884, -87.6245), new LatLng(41.8840, -87.6636), new LatLng(41.8677, -87.6641), new LatLng(41.8658, -87.6639), new LatLng(41.8633, -87.6614), new LatLng(41.8631, -87.6254)) .strokeColor(Color.RED)); for (int i=0; i<pblocks.size()-1; i++) { float rotationDegrees = (float) GetBearing(pblocks.get(i), pblocks.get(i+1)); // round it to a multiple of 3 and cast out 120s float adjBearing = Math.round(rotationDegrees / 3) * 3; while (adjBearing >= 120) { adjBearing -= 120; } float anchorX = 0.5f; float anchorY = 0.5f; Matrix matrix = new Matrix(); matrix.setRotate(adjBearing); Bitmap arrow_head = BitmapFactory.decodeResource(MainActivity.getContext().getResources(), R.drawable.dir_0); Bitmap arrowheadBitmap = 
Bitmap.createBitmap(arrow_head, 0, 0, arrow_head.getWidth(),
                arrow_head.getHeight(), matrix, true);

        // Drop a rotated arrow marker at each block start, titled by its index.
        currentMarkers[i] = mMap.addMarker(new MarkerOptions()
                .position(pblocks.get(i))
                .anchor(anchorX, anchorY)
                .flat(true) // Cease Rotation
                .title("" + i)
                .icon(BitmapDescriptorFactory.fromBitmap(arrowheadBitmap)));
    }

    // Draw the suggested route on top of other overlays (zIndex 100).
    currentPolyline = mMap.addPolyline(new PolylineOptions()
            .addAll(pblocks)
            .width(5)
            .zIndex(100)
            .color(Color.BLACK));
    /* CameraPosition cameraPosition = new CameraPosition.Builder()
            .target(new LatLng(location.getLatitude(), location.getLongitude()) )
            .zoom(17)
            .bearing(location.getBearing())
            .tilt(30)
            .build();*/
}

// Conversion factor from radians to degrees.
static double degreesPerRadian = 180.0 / Math.PI;

/**
 * Computes the initial bearing (forward azimuth) from one lat/lng point to
 * another, in degrees in [0, 360).
 */
private static double GetBearing(LatLng from, LatLng to){
    // Convert both endpoints from degrees to radians.
    double lat1 = from.latitude * Math.PI / 180.0;
    double lon1 = from.longitude * Math.PI / 180.0;
    double lat2 = to.latitude * Math.PI / 180.0;
    double lon2 = to.longitude * Math.PI / 180.0;

    // Compute the angle.
    double angle = - Math.atan2(
            Math.sin( lon1 - lon2 ) * Math.cos( lat2 ),
            Math.cos( lat1 ) * Math.sin( lat2 )
                    - Math.sin( lat1 ) * Math.cos( lat2 ) * Math.cos( lon1 - lon2 ) );
    // Normalize into [0, 2*pi).
    if (angle < 0.0)
        angle += Math.PI * 2.0;

    // And convert result to degrees.
    angle = angle * degreesPerRadian;
    return angle;
}

/**
 * Background task that fuses light, magnetic-field and cell-signal indicators
 * into an indoor/outdoor estimate via Bayesian fusion (see doInBackground).
 * TODO: original author left this marked as work in progress.
 */
private class AggregatedIODetector extends AsyncTask<String, Void, String> {

    private DetectionProfile lightProfile[];   // light-sensor result, presumably indexed indoor/semi/outdoor — confirm
    private DetectionProfile cellProfile[];    // cell-tower result, same indexing
    private DetectionProfile magnetProfile[];  // magnetometer result, same indexing
    private double[] normalizedProbablities;   // fused probabilities: [0]=indoor, [1]=outdoor
    private double[] featureValues;            // raw features for display: [light, RSS, magnetic]

    @SuppressLint({ "UseSparseArrays", "SimpleDateFormat" })
    @Override
    protected String doInBackground(String...
param) { cellTowerChart.updateProfile();//get the cell info at time = 0 for(int i=0;i<10;i++){//get the value for the magnet at the interval of 1s for 10s try { magnetChart.updateProfile(); Thread.sleep(1000); } catch (Exception e) { } } //time = 10s lightProfile = lightChart.getProfile();//get the result from the light sensor magnetProfile = magnetChart.getProfile();//get the result from the magnet cellProfile = cellTowerChart.getProfile();//get the result from the cell tower /** * Weighted Average to combine different indicators */ /*normalizedProbablities=new double[3];//indoor, semi, outdoor Log.i("profile", "light indoor " + lightProfile[0].getConfidence() + " semi " + lightProfile[1].getConfidence() + " outdoor " + lightProfile[2].getConfidence()); Log.i("profile","magnet indoor " + magnetProfile[0].getConfidence() + " semi " + magnetProfile[1].getConfidence() + " outdoor " + magnetProfile[2].getConfidence()); Log.i("profile","cell indoor " + cellProfile[0].getConfidence() + " semi " + cellProfile[1].getConfidence() + " outdoor " + cellProfile[2].getConfidence()); for(int i=0;i<normalizedProbablities.length;i++){ //Aggregate the result normalizedProbablities[i] = lightProfile[i].getConfidence()*Constants.IODETECTOR_WEIGHT_LIGHT + magnetProfile[i].getConfidence()*Constants.IODETECTOR_WEIGHT_MAGNET + cellProfile[i].getConfidence()*Constants.IODETECTOR_WEIGHT_CELLULAR; } double sum=0; for(int i=0;i<normalizedProbablities.length;i++) sum+=normalizedProbablities[i]; for(int i=0;i<normalizedProbablities.length;i++) normalizedProbablities[i]/=sum;*/ /** * Bayesian Data Fusion */ int[] outcomes={Constants.ENVIRON_INDOOR, Constants.ENVIRON_OUTDOOR}; HashMap<Integer, ArrayList<Double>> vectorsToBeFused=new HashMap<Integer, ArrayList<Double>>(); ArrayList<Double> lightVector=new ArrayList<Double>(); ArrayList<Double> RSSVector=new ArrayList<Double>(); ArrayList<Double> magneticVector=new ArrayList<Double>(); Calendar calendar = Calendar.getInstance(); featureValues=new 
double[3]; if(lightChart.getLigthValue()>0){//not blocked int hour = calendar.get(Calendar.HOUR_OF_DAY); if(hour>=8 && hour<=17) vectorsToBeFused.put(Constants.INDICATOR_LIGHT_DAY, lightVector); else vectorsToBeFused.put(Constants.INDICATOR_LIGHT_NIGHT, lightVector); lightVector.add((double)lightChart.getLigthValue()); featureValues[0]=lightVector.get(0); } vectorsToBeFused.put(Constants.INDICATOR_RSS, RSSVector); RSSVector.add(cellTowerChart.currentASU); featureValues[1]=RSSVector.get(0); vectorsToBeFused.put(Constants.INDICATOR_MAGNETIC, magneticVector); magneticVector.add(magnetChart.magnetVariation); featureValues[2]=magneticVector.get(0); normalizedProbablities=mFusionManager.BayesianFusion(outcomes, vectorsToBeFused,Constants.HIGH_LEVEL_ACTIVITY_IODOOR, mLogManager); Log.d(LOG_TAG, "Baysian fusion Environment: "+Arrays.toString(normalizedProbablities)); //For logging purposes only SharedPreferences sp=getSharedPreferences(Constants.SHARED_PREFERENCES, 0); boolean logEnvironOn=sp.getBoolean(Constants.LOGGING_ENVIRON_SWITCH, false); boolean logOn=sp.getBoolean(Constants.LOGGING_ON, false); if(logOn&&logEnvironOn){ mLogManager.log( new SimpleDateFormat("HH:mm:ss").format(new Date(System.currentTimeMillis()))+","+ lightChart.getLigthValue()+","+magnetChart.magnetVariation+","+cellTowerChart.currentASU , Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_ENVIRONMENT]); } return null; } //After calculation has been done, post the result to the user @Override protected void onPostExecute(String result2) { if(normalizedProbablities[0] > normalizedProbablities[1] // && normalizedProbablities[0] >= normalizedProbablities[1] ){//Indoor lastEnvironment =Constants.ENVIRON_INDOOR;//updating the condition for the comparison graph probabilityOfLastEnvironment=normalizedProbablities[0]; //notifyUser(view ,"You are in indoor",R.drawable.indoor_icon, 1);//triggering the notification cellTowerChart.setPrevStatus(0);//set the status for the cell tower, to be used for checking previous 
status when unchanged. }else{ /*if (normalizedProbablities[1] >normalizedProbablities[0] && normalizedProbablities[1] > normalizedProbablities[2]){//Semi outdoor lastEnvironment =Constants.ENVIRON_SEMI_OUTDOOR; probabilityOfLastEnvironment=normalizedProbablities[1]; cellTowerChart.setPrevStatus(1); }else{//Outdoor */ lastEnvironment = Constants.ENVIRON_OUTDOOR; probabilityOfLastEnvironment=normalizedProbablities[1]; cellTowerChart.setPrevStatus(2); //} } if(pastEnvironments.size()==Constants.NO_OF_PAST_STATES_STORED){ pastEnvironments.remove(0); } pastEnvironments.add(lastEnvironment); String environText=ENVIRONMENT_PREFIX+CommonUtils.eventCodeToString(lastEnvironment); if(Constants.IS_DEBUG){ for(int i=0;i<normalizedProbablities.length;i++){ environText+=" "+String.format("%.2f", normalizedProbablities[i]); } } environTextView.setText(environText+" " +"light:"+String.format("%.1f", featureValues[0]) + ", RSS:"+String.format("%.1f", featureValues[1])); aggregationFinish = true;//calculation finish } } @Override protected void onResume() { super.onResume(); mGoogleApiClient.connect(); } protected void onStart() { super.onStart(); mGoogleApiClient.connect(); } protected void onStop() { super.onStop(); // mGoogleApiClient.disconnect(); } @Override protected void onPause() { super.onPause(); } @Override protected void onDestroy(){ super.onDestroy(); mGoogleApiClient.disconnect(); mSensorManageForMap.unregisterListener(mSensorListnerForMap); } /* * Create the menu */ @Override public boolean onCreateOptionsMenu(Menu menu) { //displayParkingInfo(); MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.menu, menu); return true; } /** * Handle Performance Tuning Click */ private void handleAdvancedSetting(){ final Dialog dialog = new Dialog(this); dialog.setTitle(R.string.menu_item_advanced_settings); dialog.setContentView(R.layout.advanced_setting); final SharedPreferences mPrefs = getSharedPreferences(Constants.SHARED_PREFERENCES, Context.MODE_PRIVATE); 
final SharedPreferences.Editor editor=mPrefs.edit(); final ToggleButton classifierForCIVOnButton=(ToggleButton)dialog.findViewById(R.id.civ_classifier_on); classifierForCIVOnButton.setChecked(mPrefs.getBoolean(Constants.PREFERENCE_KEY_CIV_CLASSIFIER_ON, false)); final ToggleButton isOutdoorButton=(ToggleButton)dialog.findViewById(R.id.is_outdoor); isOutdoorButton.setChecked(mPrefs.getBoolean(Constants.PREFERENCE_KEY_IS_OUTDOOR, false)); final EditText notificationTresholdText=(EditText)dialog.findViewById(R.id.notification_threshold); notificationTresholdText.setText(String.format("%.2f", mPrefs.getFloat(Constants.PREFERENCE_KEY_NOTIFICATION_THRESHOLD, (float)Constants.DEFAULT_DETECTION_THRESHOLD)) ); //final EditText detectionIntervalText=(EditText)dialog.findViewById(R.id.detection_interval); //detectionIntervalText.setText(String.valueOf(mPrefs.getInt(Constants.PREFERENCE_KEY_DETECTION_INTERVAL, Constants.DETECTION_INTERVAL_DEFAULT_VALUE) )); final EditText googleActivityUpdateIntervalText=(EditText)dialog.findViewById(R.id.google_activity_update_interval); googleActivityUpdateIntervalText.setText( String.valueOf(mPrefs.getInt(Constants.PREFERENCE_KEY_GOOGLE_ACTIVITY_UPDATE_INTERVAL, Constants.GOOGLE_ACTIVITY_UPDATE_INTERVAL_DEFAULT_VALUE)) ); //final ToggleButton useGoogleActivityInFusion=(ToggleButton)dialog.findViewById(R.id.use_google_for_motion_state_in_fusion); //useGoogleActivityInFusion.setChecked(mPrefs.getBoolean(Constants.PREFERENCE_KEY_USE_GOOGLE_ACTIVITY_IN_FUSION, false)); final ToggleButton logAcclRawButton=(ToggleButton)dialog.findViewById(R.id.log_raw_switch); logAcclRawButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_ACCL_RAW_SWITCH, false)); final ToggleButton logAcclFeaturesButton=(ToggleButton)dialog.findViewById(R.id.log_accl_features_switch); logAcclFeaturesButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_ACCL_FEATURES_SWITCH, false)); final ToggleButton 
logDetectionButton=(ToggleButton)dialog.findViewById(R.id.log_report_switch); logDetectionButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_DETECTION_SWITCH, false)); final ToggleButton logErrorButton=(ToggleButton)dialog.findViewById(R.id.log_error_switch); logErrorButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_ERROR_SWITCH, true)); //final EditText deltaForConditionalProb=(EditText)dialog.findViewById(R.id.normal_dist_delta); //deltaForConditionalProb.setText(String.valueOf(mPrefs.getFloat(Constants.CIV_DELTA_CONDITIONAL_PROBABILITY, 2)) ); final Button applyButton = (Button) dialog.findViewById(R.id.performance_apply_button); final Button cancelButton = (Button) dialog.findViewById(R.id.peformance_cancel_button); applyButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { if (classifierForCIVOnButton.isChecked()) editor.putBoolean(Constants.PREFERENCE_KEY_CIV_CLASSIFIER_ON, true); else editor.putBoolean(Constants.PREFERENCE_KEY_CIV_CLASSIFIER_ON, false); if (isOutdoorButton.isChecked()) editor.putBoolean(Constants.PREFERENCE_KEY_IS_OUTDOOR, true); else editor.putBoolean(Constants.PREFERENCE_KEY_IS_OUTDOOR, false); if (logAcclRawButton.isChecked()) editor.putBoolean(Constants.LOGGING_ACCL_RAW_SWITCH, true); else editor.putBoolean(Constants.LOGGING_ACCL_RAW_SWITCH, false); if (logAcclFeaturesButton.isChecked()) editor.putBoolean(Constants.LOGGING_ACCL_FEATURES_SWITCH, true); else editor.putBoolean(Constants.LOGGING_ACCL_FEATURES_SWITCH, false); if (logDetectionButton.isChecked()) editor.putBoolean(Constants.LOGGING_DETECTION_SWITCH, true); else editor.putBoolean(Constants.LOGGING_DETECTION_SWITCH, false); if (logErrorButton.isChecked()) editor.putBoolean(Constants.LOGGING_ERROR_SWITCH, true); else editor.putBoolean(Constants.LOGGING_ERROR_SWITCH, false); float notificationTreshold; try{ notificationTreshold=Float.parseFloat( notificationTresholdText.getText().toString()); }catch(Exception ex){ 
notificationTreshold=(float)Constants.DEFAULT_DETECTION_THRESHOLD; } editor.putFloat(Constants.PREFERENCE_KEY_NOTIFICATION_THRESHOLD, notificationTreshold); /*int detectionInterval; try{ detectionInterval=Integer.parseInt( detectionIntervalText.getText().toString()); }catch(Exception ex){ detectionInterval=Constants.DETECTION_INTERVAL_DEFAULT_VALUE; } editor.putInt(Constants.PREFERENCE_KEY_DETECTION_INTERVAL, detectionInterval);*/ /*if (useGoogleActivityInFusion.isChecked()) editor.putBoolean(Constants.PREFERENCE_KEY_USE_GOOGLE_ACTIVITY_IN_FUSION, true); else editor.putBoolean(Constants.PREFERENCE_KEY_USE_GOOGLE_ACTIVITY_IN_FUSION, false);*/ int googleActivityUpdateInterval; try{ googleActivityUpdateInterval=Integer.parseInt( googleActivityUpdateIntervalText.getText().toString()); }catch(Exception ex){ googleActivityUpdateInterval=Constants.GOOGLE_ACTIVITY_UPDATE_INTERVAL_DEFAULT_VALUE; } editor.putInt(Constants.PREFERENCE_KEY_GOOGLE_ACTIVITY_UPDATE_INTERVAL, googleActivityUpdateInterval); /*try{ Float delta=Float.parseFloat(deltaForConditionalProb.getText().toString()); editor.putFloat(Constants.CIV_DELTA_CONDITIONAL_PROBABILITY, delta); }catch(Exception ex){ Toast.makeText(getApplicationContext(), "Input must be a float number", Toast.LENGTH_SHORT).show(); }*/ editor.commit(); dialog.cancel(); } }); cancelButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { dialog.cancel(); } }); dialog.show(); } /** * Handle Setting click */ private void handleSettings() { final Dialog dialog = new Dialog(this); dialog.setTitle(R.string.menu_item_settings); dialog.setContentView(R.layout.settings); final SharedPreferences mPrefs = getSharedPreferences(Constants.SHARED_PREFERENCES, Context.MODE_PRIVATE); final SharedPreferences.Editor editor=mPrefs.edit(); final ToggleButton logOnButton=(ToggleButton)dialog.findViewById(R.id.log_on); logOnButton.setChecked(mPrefs.getBoolean(Constants.LOGGING_ON, false)); final Button 
btDeviceSelectButton=(Button)dialog.findViewById(R.id.bt_device_button); btDeviceSelectButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { if(mBluetoothAdapter.isEnabled()){ selectBluetoothDevice(); }else{ Toast.makeText(getApplicationContext(), "Please enable your Bluetooth first.", Toast.LENGTH_SHORT).show(); } } }); final Button applyButton = (Button) dialog.findViewById(R.id.apply_button); final Button cancelButton = (Button) dialog.findViewById(R.id.cancel_button); applyButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { if (logOnButton.isChecked()) editor.putBoolean(Constants.LOGGING_ON, true); else editor.putBoolean(Constants.LOGGING_ON, false); editor.commit(); dialog.cancel(); } }); cancelButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(final View v) { dialog.cancel(); } }); dialog.show(); } /* * Handle selections from the menu */ @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle item selection switch (item.getItemId()) { /* // Clear the log display and remove the log files case R.id.menu_item_clearlog: return true; // Display the update log case R.id.menu_item_showlog: // Continue by passing true to the menu handler return true;*/ case R.id.menu_item_settings: handleSettings(); return true; case R.id.menu_item_showSensors: Intent i= new Intent(MainActivity.this, Sensors.class); startActivity(i); return true; case R.id.menu_item_advanced_settings: handleAdvancedSetting(); return true; case R.id.menu_item_show_route: Location mLastLocation = LocationServices.FusedLocationApi.getLastLocation( mGoogleApiClient); new SearchParkAsyncTask(mLastLocation).execute(); return true; case R.id.menu_item_ping_server: PingServer ps = new PingServer(); ps.execute(); return true; case R.id.menu_item_pdp: int st = reportParkDepark.getVisibility(); if(st==View.INVISIBLE) { reportParkDepark.setVisibility(View.VISIBLE); 
}else{ reportParkDepark.setVisibility(View.INVISIBLE); } return true; // For any other choice, pass it to the super() case R.id.menu_item_stop_routing: // mGoogleApiClient.disconnect(); isParked = true; mMap.clear(); default: return super.onOptionsItemSelected(item); } } /** Make sure that GPS is enabled */ public void checkGPSEnabled() { if ( !mLocationManager.isProviderEnabled(LocationManager.GPS_PROVIDER) ) { Log.e(LOG_TAG, "GPS not enabled yet"); /** Ask user to enable GPS */ final AlertDialog enableGPS = new AlertDialog.Builder(this) .setTitle(Constants.APP_NAME+ " needs access to GPS. Please enable GPS.") .setPositiveButton("Press here to enable GPS", new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, final int id) { startActivityForResult(new Intent(android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS), Constants.SENSOR_GPS); } }) .setCancelable(false) .create(); /*.setNegativeButton("Skip", new DialogInterface.OnClickListener() { public void onClick(@SuppressWarnings("unused") final DialogInterface dialog, @SuppressWarnings("unused") final int id) { } })*/ enableGPS.show(); }else{ Log.e(LOG_TAG, "GPS already enabled"); //GPS already enabled checkBluetoothEnabled(); } } /** Make sure that Bluetooth is enabled */ public void checkBluetoothEnabled() { if (mBluetoothAdapter == null) { // Device does not support Bluetooth AlertDialog noBluetoothAlert = new AlertDialog.Builder(this) .setTitle("Bluetooth not supported.") .setPositiveButton("Exit", new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, final int id) { } }) .setCancelable(true).create(); noBluetoothAlert.show(); writeToConsole("This phone does not have Bluetooth capability. 
Bluetooth connection method will not work."); return; } if (!mBluetoothAdapter.isEnabled()) { Log.e(LOG_TAG, "bluetooth not enabled yet"); /** Ask user to enable Bluetooth */ AlertDialog enableBluetoothDialog = new AlertDialog.Builder(this) .setTitle("Please enable Bluetooth on your phone.") .setCancelable(false) .setPositiveButton("Enable Bluetooth", new DialogInterface.OnClickListener() { public void onClick( final DialogInterface dialog, final int id) { startActivityForResult(new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE), Constants.SENSOR_BLUETOOTH); } }) .setNegativeButton("Skip", new DialogInterface.OnClickListener() { public void onClick( final DialogInterface dialog,final int id) {} }).create(); enableBluetoothDialog.show(); } else { selectInitBtDevice(); } } /* * Handle results returned to this Activity by other Activities started with * startActivityForResult(). In particular, the method onConnectionFailed() in * DetectionRemover and DetectionRequester may call startResolutionForResult() to * start an Activity that handles Google Play services problems. The result of this * call returns here, to onActivityResult. 
*/ @Override protected void onActivityResult(int requestCode, int resultCode, Intent intent) { // Choose what to do based on the request code Log.e(LOG_TAG, requestCode+" "+requestCode); switch (requestCode) { case Constants.SENSOR_GPS: checkBluetoothEnabled(); break; case Constants.SENSOR_BLUETOOTH: if(mBluetoothAdapter.isEnabled()){//only if the user enables the bluetooth checkBluetoothEnabled(); } break; case Constants.MY_DATA_CHECK_CODE: if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) { //the user has the necessary data - create the TTS //myTTS = new TextToSpeech(this, this); } else { //no data - install it now Intent installTTSIntent = new Intent(); installTTSIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA); startActivity(installTTSIntent); } break; // If the request code matches the code sent in onConnectionFailed case Constants.CONNECTION_FAILURE_RESOLUTION_REQUEST: switch (resultCode) { // If Google Play services resolved the problem case Activity.RESULT_OK: // If the request was to start activity recognition updates if (Constants.REQUEST_TYPE.ADD == mRequestType) { // Restart the process of requesting activity recognition // updates mGoogleActivityDetectionRequester.requestUpdates(); // If the request was to remove activity recognition updates } else if (Constants.REQUEST_TYPE.REMOVE == mRequestType) { /* * Restart the removal of all activity recognition updates * for the PendingIntent. */ mGoogleActivityDetectionRemover.removeUpdates(mGoogleActivityDetectionRequester .getRequestPendingIntent()); } break; // If any other result was returned by Google Play services default: // Report that Google Play services was unable to resolve the // problem. 
Log.d(Constants.APP_NAME, getString(R.string.no_resolution)); } // If any other request code was received default: // Report that this Activity received an unknown requestCode Log.d(Constants.APP_NAME, getString(R.string.unknown_activity_request_code, requestCode)); break; } } public void selectInitBtDevice() { SharedPreferences sharedPreferences=getSharedPreferences(Constants.SHARED_PREFERENCES, Context.MODE_PRIVATE); final String targetDeviceName = sharedPreferences.getString(Constants.BLUETOOTH_CAR_DEVICE_NAME, null); if(targetDeviceName != null){ AlertDialog bt_change = new AlertDialog.Builder(this) .setTitle("Your Car Bluetooth Device selected as "+targetDeviceName) .setPositiveButton("CONFIRM",new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { // TODO Auto-generated method stub Toast.makeText(getApplicationContext(), "bluetooth service started for "+targetDeviceName, Toast.LENGTH_LONG).show(); Intent intent = new Intent(MainActivity.this, BluetoothConnectionService.class); startService(intent); } }) .setNegativeButton("CHANGE", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { // TODO Auto-generated method stub selectBluetoothDevice(); } }).create(); bt_change.show(); } else{ selectBluetoothDevice(); } } private String selectedBloothDeviceName=null; public void selectBluetoothDevice() { Set<BluetoothDevice> bluetoothDevices=mBluetoothAdapter.getBondedDevices(); final CharSequence[] listItems = new CharSequence[bluetoothDevices.size()]; int i=0; for (BluetoothDevice device : mBluetoothAdapter.getBondedDevices()) { String device_name = device.getName(); listItems[i++]=device_name; } AlertDialog select=new AlertDialog.Builder(this) .setTitle(R.string.set_bluetooth_message) .setSingleChoiceItems(listItems, -1, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { Log.e(LOG_TAG, "id="+whichButton); 
if(whichButton>=0) selectedBloothDeviceName=listItems[whichButton].toString(); } }) .setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { Log.e(LOG_TAG, selectedBloothDeviceName); Toast.makeText(getApplicationContext(), getString(R.string.bluetooth_device_selected, selectedBloothDeviceName) , Toast.LENGTH_SHORT).show(); final SharedPreferences mPrefs = getSharedPreferences(Constants.SHARED_PREFERENCES, Context.MODE_PRIVATE); SharedPreferences.Editor editor=mPrefs.edit(); editor.putString(Constants.BLUETOOTH_CAR_DEVICE_NAME, selectedBloothDeviceName); editor.commit(); Intent intent = new Intent(MainActivity.this, BluetoothConnectionService.class); startService(intent); } }) .setNegativeButton("Cancel", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { } }) .create(); select.show(); } /** Write a string to output console */ public void writeToConsole(String str) { consoleTextView.append(str); final Layout layout = consoleTextView.getLayout(); if(layout != null){ int scrollDelta = layout.getLineBottom(consoleTextView.getLineCount() - 1) - consoleTextView.getScrollY() - consoleTextView.getHeight(); if(scrollDelta > 0) consoleTextView.scrollBy(0, scrollDelta); } } private void onDetectionConfirmed(int eventCode, Location location, String address){ int resID; String prefix; float markerColor; if(eventCode==Constants.OUTCOME_PARKING){ resID=R.raw.vehicle_parked; prefix=Constants.PARKING_NOTIFICATION; markerColor=BitmapDescriptorFactory.HUE_AZURE; }else{//unparking resID=R.raw.vehicle_deparked; prefix=Constants.UNPARKING_NOTIFICATION; markerColor=BitmapDescriptorFactory.HUE_RED; } //String curTimeString=CommonUtils.formatTimestamp(new Date(),formatTemplate); String curTimeString=CommonUtils.formatTimestamp( new Date(location.getTime()), "HH:mm:ss " ); Log.e(LOG_TAG, curTimeString+" \n"+location.toString() ); /* * actions */ //1. 
send the text notification String notificationMsg=prefix+" "+curTimeString; if(address!=null) notificationMsg+=address; mEventDetectionNotificationManager.sendTextNotification(notificationMsg); //2. play the sound //mEventDetectionNotificationManager.playVoiceNotification(resID); reportGlobalNumber++; if (resID==R.raw.vehicle_parked) { mSpeech.speak(reportGlobalNumber + " Fusion detected parking at " + curTimeString, TextToSpeech.QUEUE_ADD, null); Toast.makeText(getApplicationContext(), "Fusion detected leaving parking space at " + curTimeString, Toast.LENGTH_LONG).show(); } if (resID==R.raw.vehicle_deparked) { mSpeech.speak(reportGlobalNumber + " Fusion detected leaving parking space at " + curTimeString, TextToSpeech.QUEUE_ADD, null); Toast.makeText(getApplicationContext(), "Fusion detected leaving parking space at " + curTimeString, Toast.LENGTH_LONG).show(); } //3. log the address of event String logMsg=prefix+"\nNotification generation time:"+curTimeString+"\nlocation:"+location.toString()+"\n"; if(address!=null){ logMsg+=address+"\n"; logMsg+=pastEnvironments.toString()+"\n" +mPastClassifiedMotionStates.toString()+"\n" +mPastGoogleActivities.toString()+"\n"; } boolean logDetection=getSharedPreferences(Constants.SHARED_PREFERENCES, 0).getBoolean(Constants.LOGGING_DETECTION_SWITCH, false); if(logDetection) mLogManager.log(logMsg, Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_DETECTION_REPORT]); //4. 
show on the map mMap.clear(); mEventDetectionNotificationManager.addMarkersToMap(mMap, curTimeString, prefix , location.getLatitude(), location.getLongitude(), location.getAltitude(), markerColor); //center and zoom in the map CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(location.getLatitude(), location.getLongitude()) ) // Sets the center of the map to Mountain View .zoom(17) // Sets the zoom .bearing(location.getBearing()) // Sets the orientation of the camera to east .tilt(30) // Sets the tilt of the camera to 30 degrees .build(); // Creates a CameraPosition from the builder mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); //add a marker on the map Log.e(LOG_TAG, "operations on map completed"); //5. update availability display //updateAvailabilityDisplay(eventCode, location); //add a marker on the map Log.e(LOG_TAG, "operations on map completed"); //updateAvailabilityDisplay(eventCode, location); } // actions taken when a parking/unparking event is detected and the location of the event is retrieved private void onLocationRetrieved(int eventCode, Location location, String address){ //latestLocation=getLatestLocationFromIndividualProvider(location); String logMsg= (eventCode==Constants.OUTCOME_PARKING?Constants.PARKING_NOTIFICATION:Constants.UNPARKING_NOTIFICATION)+ "\nlocatoin retrieval time:"+CommonUtils.formatTimestamp( new Date(location.getTime()), "HH:mm:ss " )+"\nlocation:"+location.toString()+"\n"; if(address!=null){ logMsg+=address+"\n"; logMsg+=pastEnvironments.toString()+"\n" +mPastClassifiedMotionStates.toString()+"\n" +mPastGoogleActivities.toString()+"\n"; } boolean logDetection=getSharedPreferences(Constants.SHARED_PREFERENCES, 0).getBoolean(Constants.LOGGING_DETECTION_SWITCH, false); if(logDetection) mLogManager.log(logMsg, Constants.LOG_FILE_TYPE[Constants.LOG_TYPE_DETECTION_REPORT]); if(eventCode==Constants.OUTCOME_PARKING){//parking 
if(mPastGoogleActivities.isTransitionTo(MotionState.Type.ON_FOOT)){ onDetectionConfirmed(eventCode, location, address); }else{ CachedDetection cd=new CachedDetection(CachedDetection.Type.Parking, location, System.currentTimeMillis(), address); mCachedParkingDetectionList.add(cd); } }else{//unparking if(mPastGoogleActivities.isTransitionTo(MotionState.Type.IN_VEHICLE)){ onDetectionConfirmed(eventCode, location, address); }else{ CachedDetection cd=new CachedDetection(CachedDetection.Type.Unparking, location, System.currentTimeMillis(), address); mCachedUnparkingDetectionList.add(cd); } } } /* public void updateAvailabilityDisplay(int eventCode, Location location) { //Put a star on location // find closest street block within 30 meters LatLng point = new LatLng(location.getLatitude(),location.getLongitude()); double minDist = Double.MAX_VALUE; ParkingBlock matchedBlock = null; int matched_block_id; for (int i = 0; i < nearestParkingBlocks.size(); i++) { ParkingBlock parkingBlock = nearestParkingBlocks.elementAt(i); double dist = parkingBlock.distanceToPoint(point); if (dist < minDist) { minDist = dist; matchedBlock = parkingBlock; } } int index = nearestParkingBlocks.indexOf(matchedBlock); if (matchedBlock != null) { //Toast.makeText(getApplicationContext(), "a block matched", 2).show(); String block_name = matchedBlock.meterAddress*//*speechConditioner(matchedBlock.meterAddress)*//*; if(eventCode==Constants.OUTCOME_PARKING) { //matchedBlock.availability = 0; nearestParkingBlocks.elementAt(index).availability -=1; mSpeech.speak("Vehicle Parked at"+block_name, TextToSpeech.QUEUE_ADD, null); } else { //matchedBlock.availability = 1; nearestParkingBlocks.elementAt(index).availability +=1; mSpeech.speak("Vehicle DeParked at"+block_name, TextToSpeech.QUEUE_ADD, null); } } }*/ @Override public void onConnectionFailed(ConnectionResult arg0) { } /** * Legacy codes */ //private double calibration = 0.0; private double currentAcceleration; private double appliedAcceleration = 
0; private Date lastUpdate; @SuppressWarnings("unused") private double calVelocityIncrease() { // Calculate how long this acceleration has been applied. Date timeNow = new Date(System.currentTimeMillis()); double timeDelta = timeNow.getTime()-lastUpdate.getTime(); lastUpdate.setTime(timeNow.getTime()); // Calculate the change in velocity // current acceleration since the last update. double deltaVelocity = appliedAcceleration * (timeDelta/1000); appliedAcceleration = currentAcceleration; // Add the velocity change to the current velocity. return deltaVelocity; } /** * Verify that Google Play services is available before making a request. * * @return true if Google Play services is available, otherwise false */ private boolean isGooglePlayServiceAvailable() { // Check that Google Play services is available int resultCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this); // If Google Play services is available if (ConnectionResult.SUCCESS == resultCode) { // In debug mode, log the status Log.d(Constants.APP_NAME, getString(R.string.play_services_available)); // Continue return true; // Google Play services was not available for some reason } else { // Display an error dialog GooglePlayServicesUtil.getErrorDialog(resultCode, this, 0).show(); return false; } } /** * Respond to "Start" button by requesting activity recognition * updates. * @param view The view that triggered this method. */ public void startGoogleActivityRecognitionUpdates(View view) { // Check for Google Play services if (!isGooglePlayServiceAvailable()) { return; } /* * Set the request type. If a connection error occurs, and Google Play services can * handle it, then onActivityResult will use the request type to retry the request */ mRequestType = Constants.REQUEST_TYPE.ADD; // Pass the update request to the requester object mGoogleActivityDetectionRequester.requestUpdates(); } /** * Respond to "Stop" button by canceling updates. * @param view The view that triggered this method. 
*/ public void stopGoogleActivityRecognitionUpdates(View view) { // Check for Google Play services if (!isGooglePlayServiceAvailable()) { return; } /* * Set the request type. If a connection error occurs, and Google Play services can * handle it, then onActivityResult will use the request type to retry the request */ mRequestType = Constants.REQUEST_TYPE.REMOVE; // Pass the remove request to the remover object mGoogleActivityDetectionRemover.removeUpdates(mGoogleActivityDetectionRequester.getRequestPendingIntent()); /* * Cancel the PendingIntent. Even if the removal request fails, canceling the PendingIntent * will stop the updates. */ PendingIntent pIntent=mGoogleActivityDetectionRequester.getRequestPendingIntent(); if(pIntent!=null) pIntent.cancel(); } /** * Display the activity detection history stored in the * log file */ /*private void updateActivityHistory() { // Try to load data from the history file try { // Load log file records into the List List<Spanned> activityDetectionHistory = mLogManager.loadLogFile(); // Clear the adapter of existing data mStatusAdapter.clear(); // Add each element of the history to the adapter for (Spanned activity : activityDetectionHistory) { mStatusAdapter.add(activity); } // If the number of loaded records is greater than the max log size if (mStatusAdapter.getCount() > Constants.MAX_LOG_SIZE) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setMessage("File is too large to be shown.") .setCancelable(true); final AlertDialog alert = builder.create(); alert.show(); // Delete the old log file if (!mLogFile.removeLogFiles()) { // Log an error if unable to delete the log file Log.e(Constants.APPTAG, getString(R.string.log_file_deletion_error)); } } // Trigger the adapter to update the display mStatusAdapter.notifyDataSetChanged(); // If an error occurs while reading the history file } catch (IOException e) { Log.e(Constants.APP_NAME, e.getMessage(), e); } }*/ public static Context getContext(){ return 
mContext; } /** * Broadcast receiver that receives activity update intents * It checks to see if the ListView contains items. If it * doesn't, it pulls in history. * This receiver is local only. It can't read broadcast Intents from other apps. */ BroadcastReceiver updateListReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { /* * When an Intent is received from the update listener IntentService, update * the displayed log. */ //do not execute an update to avoid freezing the app //updateActivityHistory(); } }; @Override public void onInit(int status) { // TODO Auto-generated method stub } @Override public PendingResult<Status> acceptConnectionRequest(GoogleApiClient arg0, String arg1, byte[] arg2, MessageListener arg3) { // TODO Auto-generated method stub return null; } @Override public void disconnectFromEndpoint(GoogleApiClient arg0, String arg1) { // TODO Auto-generated method stub } @Override public String getLocalDeviceId(GoogleApiClient arg0) { // TODO Auto-generated method stub return null; } @Override public String getLocalEndpointId(GoogleApiClient arg0) { // TODO Auto-generated method stub return null; } @Override public PendingResult<Status> rejectConnectionRequest(GoogleApiClient arg0, String arg1) { // TODO Auto-generated method stub return null; } @Override public PendingResult<Status> sendConnectionRequest(GoogleApiClient arg0, String arg1, String arg2, byte[] arg3, ConnectionResponseCallback arg4, MessageListener arg5) { // TODO Auto-generated method stub return null; } @Override public void sendReliableMessage(GoogleApiClient arg0, String arg1, byte[] arg2) { // TODO Auto-generated method stub } @Override public void sendReliableMessage(GoogleApiClient arg0, List<String> arg1, byte[] arg2) { // TODO Auto-generated method stub } @Override public void sendUnreliableMessage(GoogleApiClient arg0, String arg1, byte[] arg2) { // TODO Auto-generated method stub } @Override public void 
sendUnreliableMessage(GoogleApiClient arg0, List<String> arg1, byte[] arg2) { // TODO Auto-generated method stub } @Override public PendingResult<StartAdvertisingResult> startAdvertising( GoogleApiClient arg0, String arg1, AppMetadata arg2, long arg3, ConnectionRequestListener arg4) { // TODO Auto-generated method stub return null; } @Override public PendingResult<Status> startDiscovery(GoogleApiClient arg0, String arg1, long arg2, EndpointDiscoveryListener arg3) { // TODO Auto-generated method stub return null; } @Override public void stopAdvertising(GoogleApiClient arg0) { // TODO Auto-generated method stub } @Override public void stopAllEndpoints(GoogleApiClient arg0) { // TODO Auto-generated method stub } @Override public void stopDiscovery(GoogleApiClient arg0, String arg1) { // TODO Auto-generated method stub } @Override public void onConnected(Bundle arg0) { // TODO Auto-generated method stub } @Override public void onConnectionSuspended(int arg0) { // TODO Auto-generated method stub } } class PastMotionStates{ public int capacity; public Source source; public HashMap<MotionState.Type, Integer> map; public ArrayList<MotionState.Type> list; public long timestampOfLastInVehicleState; public long timestampOfLastOnFootState; public static final long EXPIRATION_TIME_IN_MILLISEC=Constants.ONE_MINUTE+Constants.ONE_MINUTE/2; public PastMotionStates(Source source, int capacity) { this.source = source; this.capacity = capacity; map = new HashMap<MotionState.Type, Integer>(); list = new ArrayList<MotionState.Type>(); } public void clear(){ map.clear(); list.clear(); } public void add(MotionState.Type state) { if (list.size() == capacity) { MotionState.Type removedMotionType = list.remove(0);// remove the oldest state map.put(removedMotionType, map.get(removedMotionType) - 1); } list.add(state); if (!map.containsKey(state)) map.put(state, 0); map.put(state, map.get(state) + 1); } public void removeAll(MotionState.Type state) { while(list.remove(state)); 
map.remove(state); } public boolean isTransitionTo(MotionState.Type state){ if(state!=MotionState.Type.IN_VEHICLE&&state!=MotionState.Type.ON_FOOT) return false; boolean ret=containsAtLeastMOnFootAndAtLeastNInVehicleStates(1, 1)&&containsOnlyOneAndLater(state); if(ret){ if(state==MotionState.Type.IN_VEHICLE) removeAll(MotionState.Type.ON_FOOT); else removeAll(MotionState.Type.IN_VEHICLE); } return ret; } public boolean containsAtLeastMOnFootAndAtLeastNInVehicleStates(int mOnFoot, int nInVehicle) { // return false if the filter fails if (!map.containsKey(MotionState.Type.ON_FOOT) || !map.containsKey(MotionState.Type.IN_VEHICLE)) return false; int walkingCnt = map.get(MotionState.Type.ON_FOOT); int drivingCnt = map.get(MotionState.Type.IN_VEHICLE); // Log.e(LOG_TAG,"#Walk="+walkingCnt+" #Drive="+drivingCnt); if (walkingCnt < mOnFoot || drivingCnt < nInVehicle) return false; return true; } //Type equals to either On_foot or In_vehicle public boolean containsOnlyOneAndLater(MotionState.Type type) { if (!map.containsKey(type)||map.get(type)!=1) return false; for(int i=list.size()-1;i>=0;i--){ MotionState.Type curType=list.get(i); if(curType!=MotionState.Type.ON_FOOT&&curType!=MotionState.Type.IN_VEHICLE) continue; if(curType==type) return true; else return false; } return false; } public String toString() { String ret = list.toString() + "\n"; for (Type type : map.keySet()) ret += type.toString() + ":" + map.get(type) + " "; return ret; } } class MotionState { public enum Source { Google, Classifier; } public enum Type { ON_FOOT("On_Foot"), IN_VEHICLE("In_Vehicle"), STILL("Still"), UNKNOWN( "Unknown"), ON_BIKE("On_Bike"), OTHER("Other"); private String typeString; private Type(String type) { this.typeString = type; } public String toString() { return typeString; } } public Source source; public Type type; public int secondOfDay; public static MotionState.Type translate(String predClass) { MotionState.Type ret; if ("Walking".equals(predClass)) { 
ret=MotionState.Type.ON_FOOT; } else { if ("Driving".equals(predClass)) ret=MotionState.Type.IN_VEHICLE; else { if ("Still".equals(predClass)) ret=MotionState.Type.STILL; else ret=MotionState.Type.OTHER; } } return ret; } public static MotionState.Type translate(int activityTypeDefinedByGoogle) { MotionState.Type ret; switch (activityTypeDefinedByGoogle) { case DetectedActivity.ON_FOOT: ret=MotionState.Type.ON_FOOT; break; case DetectedActivity.IN_VEHICLE: ret=MotionState.Type.IN_VEHICLE; break; case DetectedActivity.STILL: ret=MotionState.Type.STILL; break; case DetectedActivity.ON_BICYCLE: ret=MotionState.Type.ON_BIKE; default: ret=MotionState.Type.UNKNOWN; break; } return ret; } } class CachedDetection{ public enum Type{ Parking, Unparking } public long timestamp; public Location location; public String address; public Type type; public static final long EXPIRATION_TIME=Constants.ONE_MINUTE; public CachedDetection(Type type, Location loc, long time, String address){ timestamp=time; location=loc; this.type=type; this.address=address; } } class CachedDetectionList{ CachedDetection.Type type; ArrayList<CachedDetection> list; public CachedDetectionList(CachedDetection.Type type) { this.type=type; list=new ArrayList<CachedDetection>(); } public void removeExpiredCachedDetection(){ //remove expired cached detections long curtime=System.currentTimeMillis(); int i; ArrayList<CachedDetection> newList=new ArrayList<CachedDetection>(); for(i=0;i<list.size();i++){ if(curtime-list.get(i).timestamp<=CachedDetection.EXPIRATION_TIME){ newList.add(list.get(i)); } } list=newList; } public void add(CachedDetection cd){ removeExpiredCachedDetection(); //add the new one list.add(cd); } public CachedDetection get(int index){ removeExpiredCachedDetection(); if(index<0||index>=list.size()) return null; return list.get(index); } }
Beta version release v2.01
app/src/main/java/com/uic/sandeep/phonepark/MainActivity.java
Beta version release v2.01
<ide><path>pp/src/main/java/com/uic/sandeep/phonepark/MainActivity.java <ide> .strokeColor(Color.RED)); <ide> } <ide> <del>public static int pwed = 0; <add> <ide> public class ParkingSearchClientListener implements LocationListener { <ide> <ide> public ParkingSearchClientListener(){ <ide> mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); <ide> DisplayNearestParkBlock displayNearestParkBlock = new DisplayNearestParkBlock(location); <ide> displayNearestParkBlock.execute(); <del> <del> //text_navigation.setText("Rid = " + pwed); <del> pwed++; <ide> <ide> } <ide> } <ide> } <ide> } <ide> <del> <del>/* public void updateAvailabilityDisplay(int eventCode, Location location) { <del> //Put a star on location <del> <del> // find closest street block within 30 meters <del> LatLng point = new LatLng(location.getLatitude(),location.getLongitude()); <del> double minDist = Double.MAX_VALUE; <del> ParkingBlock matchedBlock = null; <del> int matched_block_id; <del> for (int i = 0; i < nearestParkingBlocks.size(); i++) { <del> ParkingBlock parkingBlock = nearestParkingBlocks.elementAt(i); <del> double dist = parkingBlock.distanceToPoint(point); <del> if (dist < minDist) { <del> minDist = dist; <del> matchedBlock = parkingBlock; <del> } <del> } <del> <del> int index = nearestParkingBlocks.indexOf(matchedBlock); <del> <del> if (matchedBlock != null) { <del> //Toast.makeText(getApplicationContext(), "a block matched", 2).show(); <del> String block_name = matchedBlock.meterAddress*//*speechConditioner(matchedBlock.meterAddress)*//*; <del> if(eventCode==Constants.OUTCOME_PARKING) { <del> //matchedBlock.availability = 0; <del> nearestParkingBlocks.elementAt(index).availability -=1; <del> mSpeech.speak("Vehicle Parked at"+block_name, TextToSpeech.QUEUE_ADD, null); <del> } else { <del> //matchedBlock.availability = 1; <del> nearestParkingBlocks.elementAt(index).availability +=1; <del> mSpeech.speak("Vehicle DeParked at"+block_name, TextToSpeech.QUEUE_ADD, null); 
<del> } <del> } <del> }*/ <del> <del> <ide> @Override <ide> public void onConnectionFailed(ConnectionResult arg0) { <ide> }
Java
mit
4e412e6bcc9a9bfe811fcbc023406bb5c9500242
0
nfleet/java-sdk
package fi.cosky.sdk; /* * This file is subject to the terms and conditions defined in * file 'LICENSE.txt', which is part of this source code package. */ public class RoutingProblemSettingsUpdateRequest { public static final String MimeType = RoutingProblemSettingsData.MimeType; public static final double MimeVersion = RoutingProblemSettingsData.MimeVersion; private double DefaultVehicleSpeedFactor; private SpeedProfile DefaultVehicleSpeedProfile; private double InsertionAggressiveness; private String AlgorithmTree; private String DateTimeFormatString; /** * Default constructor with good values for settings. These * are also the ones that NFleet uses when settings * are not specified */ public RoutingProblemSettingsUpdateRequest() { this.DefaultVehicleSpeedFactor = 0.9; this.DefaultVehicleSpeedProfile = SpeedProfile.Max100Kmh; this.InsertionAggressiveness = 1; } public double getDefaultVehicleSpeedFactor() { return DefaultVehicleSpeedFactor; } public void setDefaultVehicleSpeedFactor(double defaultVehicleSpeedFactor) { DefaultVehicleSpeedFactor = defaultVehicleSpeedFactor; } public SpeedProfile getDefaultVehicleSpeedProfile() { return DefaultVehicleSpeedProfile; } public void setDefaultVehicleSpeedProfile(SpeedProfile defaultVehicleSpeedProfile) { DefaultVehicleSpeedProfile = defaultVehicleSpeedProfile; } public double getInsertionAggressiveness() { return InsertionAggressiveness; } public void setInsertionAggressiveness(double insertionAggressiveness) { InsertionAggressiveness = insertionAggressiveness; } public String getAlgorithmTree() { return AlgorithmTree; } public void setAlgorithmTree(String algorithmTree) { AlgorithmTree = algorithmTree; } public String getDateTimeFormatString() { return DateTimeFormatString; } public void setDateTimeFormatString(String dateTimeFormatString) { DateTimeFormatString = dateTimeFormatString; } }
fi/cosky/sdk/RoutingProblemSettingsUpdateRequest.java
package fi.cosky.sdk; /* * This file is subject to the terms and conditions defined in * file 'LICENSE.txt', which is part of this source code package. */ public class RoutingProblemSettingsUpdateRequest { public static final String MimeType = RoutingProblemSettingsData.MimeType; public static final double MimeVersion = RoutingProblemSettingsData.MimeVersion; private double DefaultVehicleSpeedFactor; private SpeedProfile DefaultVehicleSpeedProfile; private double InsertionAggressiveness; private String AlgorithmTree; private String DateTimeFormatString; public double getDefaultVehicleSpeedFactor() { return DefaultVehicleSpeedFactor; } public void setDefaultVehicleSpeedFactor(double defaultVehicleSpeedFactor) { DefaultVehicleSpeedFactor = defaultVehicleSpeedFactor; } public SpeedProfile getDefaultVehicleSpeedProfile() { return DefaultVehicleSpeedProfile; } public void setDefaultVehicleSpeedProfile(SpeedProfile defaultVehicleSpeedProfile) { DefaultVehicleSpeedProfile = defaultVehicleSpeedProfile; } public double getInsertionAggressiveness() { return InsertionAggressiveness; } public void setInsertionAggressiveness(double insertionAggressiveness) { InsertionAggressiveness = insertionAggressiveness; } public String getAlgorithmTree() { return AlgorithmTree; } public void setAlgorithmTree(String algorithmTree) { AlgorithmTree = algorithmTree; } public String getDateTimeFormatString() { return DateTimeFormatString; } public void setDateTimeFormatString(String dateTimeFormatString) { DateTimeFormatString = dateTimeFormatString; } }
Created a default constructor for routingproblemsettingsupdaterequest. Values set in constructor reflect good default values for settings.
fi/cosky/sdk/RoutingProblemSettingsUpdateRequest.java
Created a default constructor for routingproblemsettingsupdaterequest. Values set in constructor reflect good default values for settings.
<ide><path>i/cosky/sdk/RoutingProblemSettingsUpdateRequest.java <ide> private double InsertionAggressiveness; <ide> private String AlgorithmTree; <ide> private String DateTimeFormatString; <add> <add> /** <add> * Default constructor with good values for settings. These <add> * are also the ones that NFleet uses when settings <add> * are not specified <add> */ <add> public RoutingProblemSettingsUpdateRequest() { <add> this.DefaultVehicleSpeedFactor = 0.9; <add> this.DefaultVehicleSpeedProfile = SpeedProfile.Max100Kmh; <add> this.InsertionAggressiveness = 1; <add> } <ide> <ide> public double getDefaultVehicleSpeedFactor() { <ide> return DefaultVehicleSpeedFactor;
Java
apache-2.0
error: pathspec 'jsprit-core/src/test/java/jsprit/core/algorithm/RefuseCollectionWithCostsHigherThanTimesAndFiniteFleet_IT.java' did not match any file(s) known to git
78ec852f8608257f8149b9a72a4ad52bc05d9871
1
HeinrichFilter/jsprit,michalmac/jsprit,balage1551/jsprit,graphhopper/jsprit,sinhautkarsh2014/winter_jsprit,muzuro/jsprit
/******************************************************************************* * Copyright (C) 2013 Stefan Schroeder * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3.0 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package jsprit.core.algorithm; import static org.junit.Assert.assertEquals; import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.Collection; import jsprit.core.algorithm.box.GreedySchrimpfFactory; import jsprit.core.algorithm.termination.IterationWithoutImprovementTermination; import jsprit.core.problem.VehicleRoutingProblem; import jsprit.core.problem.VehicleRoutingProblem.FleetSize; import jsprit.core.problem.job.Service; import jsprit.core.problem.solution.VehicleRoutingProblemSolution; import jsprit.core.problem.vehicle.Vehicle; import jsprit.core.problem.vehicle.VehicleImpl; import jsprit.core.problem.vehicle.VehicleTypeImpl; import jsprit.core.reporting.SolutionPrinter; import jsprit.core.reporting.SolutionPrinter.Print; import jsprit.core.util.Solutions; import jsprit.core.util.VehicleRoutingTransportCostsMatrix; import jsprit.core.util.VehicleRoutingTransportCostsMatrix.Builder; import org.junit.Test; public class RefuseCollectionWithCostsHigherThanTimesAndFiniteFleet_IT { static class RelationKey { static 
RelationKey newKey(String from, String to){ int fromInt = Integer.parseInt(from); int toInt = Integer.parseInt(to); if(fromInt < toInt){ return new RelationKey(from, to); } else { return new RelationKey(to, from); } } final String from; final String to; public RelationKey(String from, String to) { super(); this.from = from; this.to = to; } /* (non-Javadoc) * @see java.lang.Object#hashCode() */ @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((from == null) ? 0 : from.hashCode()); result = prime * result + ((to == null) ? 0 : to.hashCode()); return result; } /* (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; RelationKey other = (RelationKey) obj; if (from == null) { if (other.from != null) return false; } else if (!from.equals(other.from)) return false; if (to == null) { if (other.to != null) return false; } else if (!to.equals(other.to)) return false; return true; } } // static class RoutingCosts extends AbstractForwardVehicleRoutingTransportCosts { // // private Map<RelationKey,Integer> distances; // // public RoutingCosts(Map<RelationKey, Integer> distances) { // super(); // this.distances = distances; // } // // @Override // public double getTransportTime(String fromId, String toId, double departureTime, Driver driver, Vehicle vehicle) { // return getTransportCost(fromId, toId, departureTime, driver, vehicle)/2.; // } // // @Override // public double getTransportCost(String fromId, String toId,double departureTime, Driver driver, Vehicle vehicle) { // if(fromId.equals(toId)) return 0.0; // RelationKey key = RelationKey.newKey(fromId, toId); // return distances.get(key); // } // // } @Test public void testAlgo(){ /* * create vehicle-type and vehicle */ VehicleTypeImpl.Builder typeBuilder = VehicleTypeImpl.Builder.newInstance("vehicle-type", 
23); typeBuilder.setCostPerDistance(1.0); VehicleTypeImpl bigType = typeBuilder.build(); VehicleImpl.Builder vehicleBuilder = VehicleImpl.Builder.newInstance("vehicle"); vehicleBuilder.setLocationId("1"); vehicleBuilder.setType(bigType); vehicleBuilder.setLatestArrival(220); Vehicle bigVehicle = vehicleBuilder.build(); /* * start building the problem */ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); vrpBuilder.setFleetSize(FleetSize.INFINITE); vrpBuilder.addVehicle(bigVehicle); /* * create cost-matrix */ VehicleRoutingTransportCostsMatrix.Builder matrixBuilder = VehicleRoutingTransportCostsMatrix.Builder.newInstance(true); /* * read demand quantities */ try { readDemandQuantities(vrpBuilder); readDistances(matrixBuilder); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } vrpBuilder.setRoutingCost(matrixBuilder.build()); VehicleRoutingProblem vrp = vrpBuilder.build(); VehicleRoutingAlgorithm vra = new GreedySchrimpfFactory().createAlgorithm(vrp); vra.setPrematureAlgorithmTermination(new IterationWithoutImprovementTermination(100)); Collection<VehicleRoutingProblemSolution> solutions = vra.searchSolutions(); SolutionPrinter.print(vrp, Solutions.bestOf(solutions), Print.VERBOSE); assertEquals(2.*397.,Solutions.bestOf(solutions).getCost(),0.01); assertEquals(2,Solutions.bestOf(solutions).getRoutes().size()); } private static void readDemandQuantities(VehicleRoutingProblem.Builder vrpBuilder) throws FileNotFoundException, IOException { BufferedReader reader = new BufferedReader(new FileReader(new File("src/test/resources/refuseCollectionExample_Quantities"))); String line = null; boolean firstLine = true; while((line = reader.readLine()) != null){ if(firstLine) { firstLine = false; continue; } String[] lineTokens = line.split(","); /* * build service */ Service service = 
Service.Builder.newInstance(lineTokens[0], Integer.parseInt(lineTokens[1])).setLocationId(lineTokens[0]).build(); /* * and add it to problem */ vrpBuilder.addJob(service); } reader.close(); } private static void readDistances(Builder matrixBuilder) throws IOException { BufferedReader reader = new BufferedReader(new FileReader(new File("src/test/resources/refuseCollectionExample_Distances"))); String line = null; boolean firstLine = true; while((line = reader.readLine()) != null){ if(firstLine) { firstLine = false; continue; } String[] lineTokens = line.split(","); matrixBuilder.addTransportDistance(lineTokens[0],lineTokens[1], 2.*Integer.parseInt(lineTokens[2])); matrixBuilder.addTransportTime(lineTokens[0],lineTokens[1], Integer.parseInt(lineTokens[2])); } reader.close(); } }
jsprit-core/src/test/java/jsprit/core/algorithm/RefuseCollectionWithCostsHigherThanTimesAndFiniteFleet_IT.java
add test to reproduce bug #80
jsprit-core/src/test/java/jsprit/core/algorithm/RefuseCollectionWithCostsHigherThanTimesAndFiniteFleet_IT.java
add test to reproduce bug #80
<ide><path>sprit-core/src/test/java/jsprit/core/algorithm/RefuseCollectionWithCostsHigherThanTimesAndFiniteFleet_IT.java <add>/******************************************************************************* <add> * Copyright (C) 2013 Stefan Schroeder <add> * <add> * This library is free software; you can redistribute it and/or <add> * modify it under the terms of the GNU Lesser General Public <add> * License as published by the Free Software Foundation; either <add> * version 3.0 of the License, or (at your option) any later version. <add> * <add> * This library is distributed in the hope that it will be useful, <add> * but WITHOUT ANY WARRANTY; without even the implied warranty of <add> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU <add> * Lesser General Public License for more details. <add> * <add> * You should have received a copy of the GNU Lesser General Public <add> * License along with this library. If not, see <http://www.gnu.org/licenses/>. <add> ******************************************************************************/ <add>package jsprit.core.algorithm; <add> <add>import static org.junit.Assert.assertEquals; <add> <add>import java.io.BufferedReader; <add>import java.io.File; <add>import java.io.FileNotFoundException; <add>import java.io.FileReader; <add>import java.io.IOException; <add>import java.util.Collection; <add> <add>import jsprit.core.algorithm.box.GreedySchrimpfFactory; <add>import jsprit.core.algorithm.termination.IterationWithoutImprovementTermination; <add>import jsprit.core.problem.VehicleRoutingProblem; <add>import jsprit.core.problem.VehicleRoutingProblem.FleetSize; <add>import jsprit.core.problem.job.Service; <add>import jsprit.core.problem.solution.VehicleRoutingProblemSolution; <add>import jsprit.core.problem.vehicle.Vehicle; <add>import jsprit.core.problem.vehicle.VehicleImpl; <add>import jsprit.core.problem.vehicle.VehicleTypeImpl; <add>import jsprit.core.reporting.SolutionPrinter; <add>import 
jsprit.core.reporting.SolutionPrinter.Print; <add>import jsprit.core.util.Solutions; <add>import jsprit.core.util.VehicleRoutingTransportCostsMatrix; <add>import jsprit.core.util.VehicleRoutingTransportCostsMatrix.Builder; <add> <add>import org.junit.Test; <add> <add> <add> <add>public class RefuseCollectionWithCostsHigherThanTimesAndFiniteFleet_IT { <add> <add> static class RelationKey { <add> <add> static RelationKey newKey(String from, String to){ <add> int fromInt = Integer.parseInt(from); <add> int toInt = Integer.parseInt(to); <add> if(fromInt < toInt){ <add> return new RelationKey(from, to); <add> } <add> else { <add> return new RelationKey(to, from); <add> } <add> } <add> <add> final String from; <add> final String to; <add> <add> public RelationKey(String from, String to) { <add> super(); <add> this.from = from; <add> this.to = to; <add> } <add> <add> /* (non-Javadoc) <add> * @see java.lang.Object#hashCode() <add> */ <add> @Override <add> public int hashCode() { <add> final int prime = 31; <add> int result = 1; <add> result = prime * result + ((from == null) ? 0 : from.hashCode()); <add> result = prime * result + ((to == null) ? 
0 : to.hashCode()); <add> return result; <add> } <add> <add> /* (non-Javadoc) <add> * @see java.lang.Object#equals(java.lang.Object) <add> */ <add> @Override <add> public boolean equals(Object obj) { <add> if (this == obj) <add> return true; <add> if (obj == null) <add> return false; <add> if (getClass() != obj.getClass()) <add> return false; <add> RelationKey other = (RelationKey) obj; <add> if (from == null) { <add> if (other.from != null) <add> return false; <add> } else if (!from.equals(other.from)) <add> return false; <add> if (to == null) { <add> if (other.to != null) <add> return false; <add> } else if (!to.equals(other.to)) <add> return false; <add> return true; <add> } <add> } <add> <add>// static class RoutingCosts extends AbstractForwardVehicleRoutingTransportCosts { <add>// <add>// private Map<RelationKey,Integer> distances; <add>// <add>// public RoutingCosts(Map<RelationKey, Integer> distances) { <add>// super(); <add>// this.distances = distances; <add>// } <add>// <add>// @Override <add>// public double getTransportTime(String fromId, String toId, double departureTime, Driver driver, Vehicle vehicle) { <add>// return getTransportCost(fromId, toId, departureTime, driver, vehicle)/2.; <add>// } <add>// <add>// @Override <add>// public double getTransportCost(String fromId, String toId,double departureTime, Driver driver, Vehicle vehicle) { <add>// if(fromId.equals(toId)) return 0.0; <add>// RelationKey key = RelationKey.newKey(fromId, toId); <add>// return distances.get(key); <add>// } <add>// <add>// } <add> <add> <add> @Test <add> public void testAlgo(){ <add> <add> <add> /* <add> * create vehicle-type and vehicle <add> */ <add> VehicleTypeImpl.Builder typeBuilder = VehicleTypeImpl.Builder.newInstance("vehicle-type", 23); <add> typeBuilder.setCostPerDistance(1.0); <add> VehicleTypeImpl bigType = typeBuilder.build(); <add> <add> VehicleImpl.Builder vehicleBuilder = VehicleImpl.Builder.newInstance("vehicle"); <add> vehicleBuilder.setLocationId("1"); 
<add> vehicleBuilder.setType(bigType); <add> vehicleBuilder.setLatestArrival(220); <add> Vehicle bigVehicle = vehicleBuilder.build(); <add> <add> /* <add> * start building the problem <add> */ <add> VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); <add> vrpBuilder.setFleetSize(FleetSize.INFINITE); <add> vrpBuilder.addVehicle(bigVehicle); <add> <add> /* <add> * create cost-matrix <add> */ <add> VehicleRoutingTransportCostsMatrix.Builder matrixBuilder = VehicleRoutingTransportCostsMatrix.Builder.newInstance(true); <add> /* <add> * read demand quantities <add> */ <add> try { <add> readDemandQuantities(vrpBuilder); <add> readDistances(matrixBuilder); <add> } catch (FileNotFoundException e) { <add> // TODO Auto-generated catch block <add> e.printStackTrace(); <add> } catch (IOException e) { <add> // TODO Auto-generated catch block <add> e.printStackTrace(); <add> } <add> <add> vrpBuilder.setRoutingCost(matrixBuilder.build()); <add> VehicleRoutingProblem vrp = vrpBuilder.build(); <add> VehicleRoutingAlgorithm vra = new GreedySchrimpfFactory().createAlgorithm(vrp); <add> vra.setPrematureAlgorithmTermination(new IterationWithoutImprovementTermination(100)); <add> Collection<VehicleRoutingProblemSolution> solutions = vra.searchSolutions(); <add> <add> SolutionPrinter.print(vrp, Solutions.bestOf(solutions), Print.VERBOSE); <add> <add> assertEquals(2.*397.,Solutions.bestOf(solutions).getCost(),0.01); <add> assertEquals(2,Solutions.bestOf(solutions).getRoutes().size()); <add> } <add> <add> <add> private static void readDemandQuantities(VehicleRoutingProblem.Builder vrpBuilder) throws FileNotFoundException, IOException { <add> BufferedReader reader = new BufferedReader(new FileReader(new File("src/test/resources/refuseCollectionExample_Quantities"))); <add> String line = null; <add> boolean firstLine = true; <add> while((line = reader.readLine()) != null){ <add> if(firstLine) { <add> firstLine = false; <add> continue; <add> } <add> 
String[] lineTokens = line.split(","); <add> /* <add> * build service <add> */ <add> Service service = Service.Builder.newInstance(lineTokens[0], Integer.parseInt(lineTokens[1])).setLocationId(lineTokens[0]).build(); <add> /* <add> * and add it to problem <add> */ <add> vrpBuilder.addJob(service); <add> } <add> reader.close(); <add> } <add> <add> <add> private static void readDistances(Builder matrixBuilder) throws IOException { <add> BufferedReader reader = new BufferedReader(new FileReader(new File("src/test/resources/refuseCollectionExample_Distances"))); <add> String line = null; <add> boolean firstLine = true; <add> while((line = reader.readLine()) != null){ <add> if(firstLine) { <add> firstLine = false; <add> continue; <add> } <add> String[] lineTokens = line.split(","); <add> matrixBuilder.addTransportDistance(lineTokens[0],lineTokens[1], 2.*Integer.parseInt(lineTokens[2])); <add> matrixBuilder.addTransportTime(lineTokens[0],lineTokens[1], Integer.parseInt(lineTokens[2])); <add> } <add> reader.close(); <add> <add> } <add> <add> <add>}
JavaScript
isc
d54155dce68e2db944045e293bbd473a97f3b2c3
0
jurca/subscription-feed-for-youtube,jurca/subscription-feed-for-youtube
/** * Possible states of a subscription to a YouTube channel or playlist. * * @enum {string} */ export default Object.freeze({ /** * The subscription is enabled, videos are fetched from the playlist. */ ACTIVE: "ACTIVE", /** * The subscription is disabled (either this is a disabled incognito * subscription, or the Google account has been disabled). */ DISABLED: "DISABLED", /** * The subscription is enabled, but reporting an error. */ ERROR: "ERROR" })
src/model/SubscriptionState.js
/** * Possible states of a subscription to a YouTube channel or playlist. * * @enum {string} */ export default Object.freeze({ /** * The subscription is enabled, videos are fetched from the playlist. */ ACTIVE: "ACTIVE", /** * The subscription is disabled. This is only used with incognito * subscriptions. */ DISABLED: "DISABLED", /** * The subscription is enabled, but reporting an error. */ ERROR: "ERROR" })
documentation udpate
src/model/SubscriptionState.js
documentation udpate
<ide><path>rc/model/SubscriptionState.js <ide> ACTIVE: "ACTIVE", <ide> <ide> /** <del> * The subscription is disabled. This is only used with incognito <del> * subscriptions. <add> * The subscription is disabled (either this is a disabled incognito <add> * subscription, or the Google account has been disabled). <ide> */ <ide> DISABLED: "DISABLED", <ide>
JavaScript
agpl-3.0
ad5658ada5609b6a479d2cf29bea2325265ba674
0
FredPassos/pydio-core,ChuckDaniels87/pydio-core,sespivak/pydio-core,snw35/pydio-core,sespivak/pydio-core,sespivak/pydio-core,snw35/pydio-core,FredPassos/pydio-core,snw35/pydio-core,sespivak/pydio-core,pydio/pydio-core,ChuckDaniels87/pydio-core,pydio/pydio-core,huzergackl/pydio-core,snw35/pydio-core,pydio/pydio-core,huzergackl/pydio-core,ChuckDaniels87/pydio-core,pydio/pydio-core,ChuckDaniels87/pydio-core,ChuckDaniels87/pydio-core,FredPassos/pydio-core,huzergackl/pydio-core,sespivak/pydio-core,snw35/pydio-core,huzergackl/pydio-core,FredPassos/pydio-core,FredPassos/pydio-core,sespivak/pydio-core,sespivak/pydio-core,snw35/pydio-core,snw35/pydio-core,huzergackl/pydio-core,FredPassos/pydio-core,pydio/pydio-core,FredPassos/pydio-core
Modal = Class.create({ pageLoading: true, initialize: function(){ }, initForms: function(){ this.elementName = 'generic_dialog_box'; this.htmlElement = $(this.elementName); this.dialogTitle = this.htmlElement.getElementsBySelector(".dialogTitle")[0]; this.dialogContent = this.htmlElement.getElementsBySelector(".dialogContent")[0]; this.currentForm; this.cachedForms = new Hash(); this.iframeIndex = 0; }, prepareHeader: function(sTitle, sIconSrc){ var hString = "<span class=\"titleString\">"; if(sIconSrc != "") hString = "<span class=\"titleString\"><img src=\""+sIconSrc.replace('22', '16')+"\" width=\"16\" height=\"16\" align=\"top\"/>&nbsp;"; hString += sTitle + '</span>'; this.dialogTitle.innerHTML = hString; }, showDialogForm: function(sTitle, sFormId, fOnLoad, fOnComplete, fOnCancel, bOkButtonOnly, skipButtons){ this.clearContent(this.dialogContent); //this.dialogTitle.innerHTML = sTitle; var newForm; if($(sFormId).tagName == 'FORM') // WE PASSED A PREFETCHED HIDDEN FORM { newForm = $(sFormId); newForm.show(); } else { var formDiv = $(sFormId); //var formDiv = $('all_forms').select('[id="'+sFormId+'"]')[0]; var newForm = document.createElement('form'); newForm.id = 'action_form'; newForm.setAttribute('action', 'cont.php'); newForm.appendChild(formDiv.cloneNode(true)); var reloadIFrame = null; if($(newForm).getElementsByTagName("iframe")[0]) { reloadIFrame = $(newForm).getElementsByTagName("iframe")[0]; reloadIFrameSrc = $(newForm).getElementsByTagName("iframe")[0].getAttribute("src"); } if(formDiv.getAttribute('action')) { var actionField = document.createElement('input'); actionField.setAttribute('type', 'hidden'); actionField.setAttribute('name', 'get_action'); actionField.setAttribute('value', formDiv.getAttribute('action')); newForm.appendChild(actionField); } } if(!this.cachedForms.get(sFormId) && !skipButtons){ this.addSubmitCancel(newForm, fOnCancel, bOkButtonOnly); } this.dialogContent.appendChild(newForm); if(fOnComplete) { newForm.onsubmit = 
function(){ try{ fOnComplete(); }catch(e){ alert('Unexpected Error : please report!\n'+e); } return false; } } else { newForm.onsubmit = function(){ ajaxplorer.actionBar.submitForm(modal.getForm()); hideLightBox(); return false; }; } this.showContent(this.elementName, $(sFormId).getAttribute("box_width"), $(sFormId).getAttribute("box_height")); if($(newForm).getElementsBySelector(".dialogFocus").length) { objToFocus = $(newForm).getElementsBySelector(".dialogFocus")[0]; setTimeout('objToFocus.focus()', 500); } if($(newForm).getElementsBySelector(".replace_rep").length) { repDisplay = $(newForm).getElementsBySelector(".replace_rep")[0]; repDisplay.innerHTML = ajaxplorer.filesList.getCurrentRep(); } if($(newForm).getElementsBySelector(".replace_file").length) { repDisplay = $(newForm).getElementsBySelector(".replace_file")[0]; repDisplay.innerHTML = getBaseName(ajaxplorer.filesList.getUserSelection().getUniqueFileName()); } this.currentForm = newForm; if(fOnLoad != null) { fOnLoad(this.currentForm); } // SAFARI => FORCE IFRAME RELOADING if(reloadIFrame) reloadIFrame.src = reloadIFrameSrc; }, showContent: function(elementName, boxWidth, boxHeight){ ajaxplorer.disableShortcuts(); ajaxplorer.disableNavigation(); ajaxplorer.filesList.blur(); //jQuery('#'+elementName).corner("round top 5px"); if(Prototype.Browser.IE){ jQuery('#'+elementName + ' .dialogTitle').corner("round top 7px"); }else{ jQuery('#'+elementName).corner("round top 7px"); } if(!this.isRounded) { ajxpCorners($(elementName), 'bottom'); this.isRounded = true; } var winWidth = $(document.body).getWidth(); var winHeight = $(document.body).getHeight(); // WIDTH / HEIGHT if(boxWidth != null){ if(boxWidth.indexOf('%') > -1){ percentWidth = parseInt(boxWidth); boxWidth = parseInt((winWidth * percentWidth) / 100); } $(elementName).setStyle({width:boxWidth+'px'}); } if(boxHeight != null){ if(boxHeight.indexOf('%') > -1){ percentHeight = parseInt(boxHeight); boxHeight = parseInt((winHeight * percentHeight) / 100); } 
$(elementName).setStyle({height:boxHeight+'px'}); }else{ if (Prototype.Browser.IE){ $(elementName).setStyle({height:'1%'}); }else{ $(elementName).setStyle({height:'auto'}); } } // POSITION HORIZONTAL boxWidth = $(elementName).getWidth(); var offsetLeft = (winWidth - parseInt(boxWidth)) / 2; $(elementName).setStyle({left:offsetLeft+'px'}); // POSITION VERTICAL var boxHeight = $(elementName).getHeight(); var offsetTop = parseInt(((winHeight - boxHeight)/3)); $(elementName).setStyle({top:offsetTop+'px'}); displayLightBoxById(elementName); // FORCE ABSOLUTE FOR SAFARI $(elementName).style.position = 'absolute'; // FORCE FIXED FOR FIREFOX if (Prototype.Browser.Gecko){ $(elementName).style.position = 'fixed'; } // REFRESH PNG IMAGES FOR IE! refreshPNGImages(this.dialogContent); }, getForm: function() { return this.currentForm; }, clearContent: function(object){ // REMOVE CURRENT FORM, IF ANY if(object.getElementsBySelector("form").length) { var oThis = this; object.getElementsBySelector("form").each(function(currentForm){ if(currentForm.target == 'hidden_iframe' || currentForm.id=='login_form' || currentForm.id=='user_pref_form'){ currentForm.hide(); oThis.cachedForms.set(currentForm.id,true); } else{ object.removeChild(currentForm); } }); } }, addSubmitCancel: function(oForm, fOnCancel, bOkButtonOnly){ var contDiv = document.createElement('div'); contDiv.className = 'dialogButtons'; var okButton = document.createElement('input'); okButton.setAttribute('type', 'submit'); okButton.setAttribute('name', 'sub'); okButton.setAttribute('value', MessageHash[48]); $(okButton).addClassName('dialogButton'); $(okButton).addClassName('dialogFocus'); contDiv.appendChild(okButton); if(!bOkButtonOnly) { var caButton = document.createElement('input'); caButton.setAttribute('type', 'button'); caButton.setAttribute('name', 'can'); caButton.setAttribute('value', MessageHash[49]); $(caButton).addClassName('dialogButton'); if(fOnCancel){ caButton.onclick = 
function(){fOnCancel();hideLightBox();return false;}; } else{ caButton.onclick = function(){hideLightBox();return false;}; } contDiv.appendChild(caButton); } oForm.appendChild(contDiv); oForm.hasButtons = true; }, setLoadingStepCounts: function(count){ this.loadingStepsCount = count; this.loadingStep = count; }, incrementStepCounts: function(add){ this.loadingStepsCount += add; this.loadingStep += add; }, updateLoadingProgress: function(state){ this.loadingStep --; var percent = (1 - (this.loadingStep / this.loadingStepsCount)); var width = parseInt(parseInt($('progressBarBorder').getWidth()) * percent); /* var command = "if($('progressBar')) $('progressBar').style.width = '"+width+"px';"; setTimeout(command, 0); */ if(state){ $('progressState').value = state; } if($('progressBar')){ /* $('progressBar').style.width = width+'px'; */ var afterFinishFunc; if(parseInt(percent)==1){ afterFinishFunc = function(effect){ new Effect.Opacity('loading_overlay', { from:1.0, to:0, duration:0.3, afterFinish:function(effect){ $('loading_overlay').remove(); //if(ajaxplorer) ajaxplorer.actionBar.update(); } }); } } new Effect.Morph('progressBar',{ style:'width:'+width + 'px', duration:0.8, afterFinish:afterFinishFunc }); } if(this.loadingStep == 0){ //$('loading_overlay').remove(); this.pageLoading = false; } }, setCloseAction: function(func){ this.closeFunction = func; }, close: function(){ if(this.closeFunction){ this.closeFunction(); this.closeFunction = null; } } }); var modal = new Modal();
core/src/client/js/ajaxplorer/class.Modal.js
Modal = Class.create({ pageLoading: true, initialize: function(){ }, initForms: function(){ this.elementName = 'generic_dialog_box'; this.htmlElement = $(this.elementName); this.dialogTitle = this.htmlElement.getElementsBySelector(".dialogTitle")[0]; this.dialogContent = this.htmlElement.getElementsBySelector(".dialogContent")[0]; this.currentForm; this.cachedForms = new Hash(); this.iframeIndex = 0; }, prepareHeader: function(sTitle, sIconSrc){ var hString = "<span class=\"titleString\">"; if(sIconSrc != "") hString = "<span class=\"titleString\"><img src=\""+sIconSrc.replace('22', '16')+"\" width=\"16\" height=\"16\" align=\"top\"/>&nbsp;"; hString += sTitle + '</span>'; this.dialogTitle.innerHTML = hString; }, showDialogForm: function(sTitle, sFormId, fOnLoad, fOnComplete, fOnCancel, bOkButtonOnly, skipButtons){ this.clearContent(this.dialogContent); //this.dialogTitle.innerHTML = sTitle; var newForm; if($(sFormId).tagName == 'FORM') // WE PASSED A PREFETCHED HIDDEN FORM { newForm = $(sFormId); newForm.show(); } else { var formDiv = $(sFormId); //var formDiv = $('all_forms').select('[id="'+sFormId+'"]')[0]; var newForm = document.createElement('form'); newForm.id = 'action_form'; newForm.setAttribute('action', 'cont.php'); newForm.appendChild(formDiv.cloneNode(true)); var reloadIFrame = null; if($(newForm).getElementsByTagName("iframe")[0]) { reloadIFrame = $(newForm).getElementsByTagName("iframe")[0]; reloadIFrameSrc = $(newForm).getElementsByTagName("iframe")[0].getAttribute("src"); } if(formDiv.getAttribute('action')) { var actionField = document.createElement('input'); actionField.setAttribute('type', 'hidden'); actionField.setAttribute('name', 'get_action'); actionField.setAttribute('value', formDiv.getAttribute('action')); newForm.appendChild(actionField); } } if(!this.cachedForms.get(sFormId) && !skipButtons){ this.addSubmitCancel(newForm, fOnCancel, bOkButtonOnly); } this.dialogContent.appendChild(newForm); if(fOnComplete) { newForm.onsubmit = 
function(){ try{ fOnComplete(); }catch(e){ alert('Unexpected Error : please report!\n'+e); } return false; } } else { newForm.onsubmit = function(){ ajaxplorer.actionBar.submitForm(modal.getForm()); hideLightBox(); return false; }; } this.showContent(this.elementName, $(sFormId).getAttribute("box_width"), $(sFormId).getAttribute("box_height")); if($(newForm).getElementsBySelector(".dialogFocus").length) { objToFocus = $(newForm).getElementsBySelector(".dialogFocus")[0]; setTimeout('objToFocus.focus()', 500); } if($(newForm).getElementsBySelector(".replace_rep").length) { repDisplay = $(newForm).getElementsBySelector(".replace_rep")[0]; repDisplay.innerHTML = ajaxplorer.filesList.getCurrentRep(); } if($(newForm).getElementsBySelector(".replace_file").length) { repDisplay = $(newForm).getElementsBySelector(".replace_file")[0]; repDisplay.innerHTML = getBaseName(ajaxplorer.filesList.getUserSelection().getUniqueFileName()); } this.currentForm = newForm; if(fOnLoad != null) { fOnLoad(this.currentForm); } // SAFARI => FORCE IFRAME RELOADING if(reloadIFrame) reloadIFrame.src = reloadIFrameSrc; }, showContent: function(elementName, boxWidth, boxHeight){ ajaxplorer.disableShortcuts(); ajaxplorer.disableNavigation(); ajaxplorer.filesList.blur(); //jQuery('#'+elementName).corner("round top 5px"); if(Prototype.Browser.IE){ jQuery('#'+elementName + ' .dialogTitle').corner("round top 7px"); }else{ jQuery('#'+elementName).corner("round top 7px"); } if(!this.isRounded) { ajxpCorners($(elementName), 'bottom'); this.isRounded = true; } var winWidth = $(document.body).getWidth(); var winHeight = $(document.body).getHeight(); // WIDTH / HEIGHT if(boxWidth != null){ if(boxWidth.indexOf('%') > -1){ percentWidth = parseInt(boxWidth); boxWidth = parseInt((winWidth * percentWidth) / 100); } $(elementName).setStyle({width:boxWidth+'px'}); } if(boxHeight != null){ if(boxHeight.indexOf('%') > -1){ percentHeight = parseInt(boxHeight); boxHeight = parseInt((winHeight * percentHeight) / 100); } 
$(elementName).setStyle({height:boxHeight+'px'}); }else{ if (Prototype.Browser.IE){ $(elementName).setStyle({height:'1%'}); }else{ $(elementName).setStyle({height:'auto'}); } } // POSITION HORIZONTAL boxWidth = $(elementName).getWidth(); var offsetLeft = (winWidth - parseInt(boxWidth)) / 2; $(elementName).setStyle({left:offsetLeft+'px'}); // POSITION VERTICAL var boxHeight = $(elementName).getHeight(); var offsetTop = parseInt(((winHeight - boxHeight)/3)); $(elementName).setStyle({top:offsetTop+'px'}); //if (Prototype.Browser.IE){ //} displayLightBoxById(elementName); // FORCE ABSOLUTE FOR SAFARI! $(elementName).style.position = 'absolute'; // REFRESH PNG IMAGES FOR IE! refreshPNGImages(this.dialogContent); }, getForm: function() { return this.currentForm; }, clearContent: function(object){ // REMOVE CURRENT FORM, IF ANY if(object.getElementsBySelector("form").length) { var oThis = this; object.getElementsBySelector("form").each(function(currentForm){ if(currentForm.target == 'hidden_iframe' || currentForm.id=='login_form' || currentForm.id=='user_pref_form'){ currentForm.hide(); oThis.cachedForms.set(currentForm.id,true); } else{ object.removeChild(currentForm); } }); } }, addSubmitCancel: function(oForm, fOnCancel, bOkButtonOnly){ var contDiv = document.createElement('div'); contDiv.className = 'dialogButtons'; var okButton = document.createElement('input'); okButton.setAttribute('type', 'submit'); okButton.setAttribute('name', 'sub'); okButton.setAttribute('value', MessageHash[48]); $(okButton).addClassName('dialogButton'); $(okButton).addClassName('dialogFocus'); contDiv.appendChild(okButton); if(!bOkButtonOnly) { var caButton = document.createElement('input'); caButton.setAttribute('type', 'button'); caButton.setAttribute('name', 'can'); caButton.setAttribute('value', MessageHash[49]); $(caButton).addClassName('dialogButton'); if(fOnCancel){ caButton.onclick = function(){fOnCancel();hideLightBox();return false;}; } else{ caButton.onclick = 
function(){hideLightBox();return false;}; } contDiv.appendChild(caButton); } oForm.appendChild(contDiv); oForm.hasButtons = true; }, setLoadingStepCounts: function(count){ this.loadingStepsCount = count; this.loadingStep = count; }, incrementStepCounts: function(add){ this.loadingStepsCount += add; this.loadingStep += add; }, updateLoadingProgress: function(state){ this.loadingStep --; var percent = (1 - (this.loadingStep / this.loadingStepsCount)); var width = parseInt(parseInt($('progressBarBorder').getWidth()) * percent); /* var command = "if($('progressBar')) $('progressBar').style.width = '"+width+"px';"; setTimeout(command, 0); */ if(state){ $('progressState').value = state; } if($('progressBar')){ /* $('progressBar').style.width = width+'px'; */ var afterFinishFunc; if(parseInt(percent)==1){ afterFinishFunc = function(effect){ new Effect.Opacity('loading_overlay', { from:1.0, to:0, duration:0.3, afterFinish:function(effect){ $('loading_overlay').remove(); //if(ajaxplorer) ajaxplorer.actionBar.update(); } }); } } new Effect.Morph('progressBar',{ style:'width:'+width + 'px', duration:0.8, afterFinish:afterFinishFunc }); } if(this.loadingStep == 0){ //$('loading_overlay').remove(); this.pageLoading = false; } }, setCloseAction: function(func){ this.closeFunction = func; }, close: function(){ if(this.closeFunction){ this.closeFunction(); this.closeFunction = null; } } }); var modal = new Modal();
Set modal dialog position to 'fixed' for Firefox problems with cursor disappearing
core/src/client/js/ajaxplorer/class.Modal.js
Set modal dialog position to 'fixed' for Firefox problems with cursor disappearing
<ide><path>ore/src/client/js/ajaxplorer/class.Modal.js <ide> var boxHeight = $(elementName).getHeight(); <ide> var offsetTop = parseInt(((winHeight - boxHeight)/3)); <ide> $(elementName).setStyle({top:offsetTop+'px'}); <del> <del> //if (Prototype.Browser.IE){ <ide> <del> //} <del> <del> displayLightBoxById(elementName); <del> <del> // FORCE ABSOLUTE FOR SAFARI! <add> displayLightBoxById(elementName); <add> <add> // FORCE ABSOLUTE FOR SAFARI <ide> $(elementName).style.position = 'absolute'; <add> // FORCE FIXED FOR FIREFOX <add> if (Prototype.Browser.Gecko){ <add> $(elementName).style.position = 'fixed'; <add> } <add> <ide> // REFRESH PNG IMAGES FOR IE! <ide> refreshPNGImages(this.dialogContent); <ide> },
Java
apache-2.0
5e5dc112b0cbf6d177d047ec612e5d980fec9489
0
wyona/yanel,wyona/yanel,wyona/yanel,wyona/yanel,wyona/yanel,wyona/yanel
/* * Copyright 2010 - 2017 Wyona */ package org.wyona.yanel.impl.resources.yaneluser; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.util.MailUtil; import org.wyona.yanel.impl.resources.BasicXMLResource; import org.wyona.security.core.api.Identity; import org.wyona.security.core.api.User; import org.wyona.yanel.servlet.YanelServlet; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.wyona.commons.xml.XMLHelper; /** * A resource to edit/update the profile of a user */ public class EditYanelUserProfileResource extends BasicXMLResource { private static Logger log = LogManager.getLogger(EditYanelUserProfileResource.class); private String transformerParameterName; private String transformerParameterValue; private static final String USER_PROP_NAME = "user"; /* * @see org.wyona.yanel.impl.resources.BasicXMLResource#getContentXML(String) */ protected InputStream getContentXML(String viewId) throws Exception { if (log.isDebugEnabled()) { log.debug("requested viewId: " + viewId); } String oldPassword = getEnvironment().getRequest().getParameter("oldPassword"); if (oldPassword != null) { String newPassword = getEnvironment().getRequest().getParameter("newPassword"); String newPasswordConfirmed = getEnvironment().getRequest().getParameter("newPasswordConfirmation"); updatePassword(oldPassword, newPassword, newPasswordConfirmed); } String email = getEnvironment().getRequest().getParameter("email"); boolean emailUpdated = false; if (email != null) { emailUpdated = updateProfile(email); log.info("Email '" + email + "' has been updated: " + emailUpdated); } try { return getXMLAsStream(emailUpdated); } catch(Exception e) { log.error(e, e); return null; } } /** * Get user profile as XML as stream * @param emailUpdated Flag 
whether email got updated successfully */ private java.io.InputStream getXMLAsStream(boolean emailUpdated) throws Exception { String userId = getUserId(); if (userId != null) { Document doc = getUserProfile(userId, emailUpdated); return XMLHelper.getInputStream(doc, false, true, null); } else { return new java.io.StringBufferInputStream("<no-user-id/>"); } } /** * Get user profile as DOM XML * @param userID ID of user * @param emailUpdated Flag whether email got updated successfully */ protected Document getUserProfile(String userId, boolean emailUpdated) throws Exception { User user = realm.getIdentityManager().getUserManager().getUser(userId); Document doc = XMLHelper.createDocument(null, "user"); Element rootEl = doc.getDocumentElement(); rootEl.setAttribute("id", userId); rootEl.setAttribute("email", user.getEmail()); rootEl.setAttribute("lamguage", user.getLanguage()); // DEPRECATED rootEl.setAttribute("language", user.getLanguage()); if (emailUpdated) { rootEl.setAttribute("email-saved-successfully", "true"); } Element nameEl = doc.createElement("name"); nameEl.setTextContent(user.getName()); rootEl.appendChild(nameEl); Element realmEl = doc.createElement("realm"); rootEl.appendChild(realmEl); String[] languages = getRealm().getLanguages(); if (languages != null && languages.length > 0) { Element supportedLanguagesEl = doc.createElement("languages"); // TODO: Set default language String defaultLanguage = getRealm().getDefaultLanguage(); realmEl.appendChild(supportedLanguagesEl); for (int i = 0; i < languages.length; i++) { Element languageEl = doc.createElement("language"); languageEl.setTextContent(languages[i]); supportedLanguagesEl.appendChild(languageEl); } } Element expirationDateEl = doc.createElement("expiration-date"); expirationDateEl.setTextContent("" + user.getExpirationDate()); rootEl.appendChild(expirationDateEl); Element descEl = doc.createElement("description"); descEl.setTextContent("" + user.getDescription()); rootEl.appendChild(descEl); 
org.wyona.security.core.api.Group[] groups = user.getGroups(); if (groups != null && groups.length > 0) { Element groupsEl = doc.createElement("groups"); rootEl.appendChild(groupsEl); for (int i = 0; i < groups.length; i++) { Element groupEl = doc.createElement("group"); groupEl.setAttribute("id", groups[i].getID()); groupsEl.appendChild(groupEl); } } String[] aliases = user.getAliases(); if (aliases != null && aliases.length > 0) { Element aliasesEl = (Element) rootEl.appendChild(doc.createElement("aliases")); for (int i = 0; i < aliases.length; i++) { Element aliasEl = (Element) aliasesEl.appendChild(doc.createElement("alias")); aliasEl.appendChild(doc.createTextNode(aliases[i])); } } else { rootEl.appendChild(doc.createElement("no-aliases")); } org.wyona.security.core.UserHistory history = user.getHistory(); if (history != null) { java.util.List<org.wyona.security.core.UserHistory.HistoryEntry> entries = history.getHistory(); if (entries != null) { for (org.wyona.security.core.UserHistory.HistoryEntry entry: entries) { Element historyEl = (Element) rootEl.appendChild(doc.createElement("history")); Element eventEl = (Element) historyEl.appendChild(doc.createElement("event")); eventEl.setAttribute("usecase", entry.getUsecase()); eventEl.setAttribute("description", entry.getDescription()); eventEl.setAttribute("date", "" + entry.getDate()); } } } return doc; } /** * Get user ID, whereas check various options, such as 1) query string, 2) resource configuration, 3) URL and 4) session */ protected String getUserId() throws Exception { String userId = null; // 1) userId = getEnvironment().getRequest().getParameter("id"); if (userId != null) { return userId; /* if (getRealm().getPolicyManager().authorize("/yanel/users/" + userId + ".html", getEnvironment().getIdentity(), new org.wyona.security.core.api.Usecase("view"))) { // INFO: Because the policymanager has no mean to check (or interpret) query strings we need to recheck programmatically return userId; } else { 
//throw new Exception("User '" + getEnvironment().getIdentity().getUsername() + "' tries to access user profile '" + userId + "', but is not authorized!"); log.warn("User '" + getEnvironment().getIdentity().getUsername() + "' tries to access user profile '" + userId + "', but is not authorized!"); } */ } else { log.debug("User ID is not part of query string."); } // 2) userId = getResourceConfigProperty(USER_PROP_NAME); if (userId != null) { return userId; } else { log.debug("User ID is not configured inside resource configuration."); } // 3) userId = getPath().substring(getPath().lastIndexOf("/") + 1, getPath().lastIndexOf(".html")); if (userId != null && getRealm().getIdentityManager().getUserManager().existsUser(userId)) { return userId; } else { log.debug("Could not retrieve user ID from URL."); } // 4) userId = getEnvironment().getIdentity().getUsername(); if (userId != null) { return userId; } else { log.warn("User does not seem to be signed in!"); } throw new Exception("Cannot retrieve user ID!"); } /** * Change user password * @param oldPassword Existing current password */ protected void updatePassword(String oldPassword, String newPassword, String newPasswordConfirmed) throws Exception { String userId = getUserId(); if (!getRealm().getIdentityManager().getUserManager().getUser(userId).authenticate(oldPassword)) { setTransformerParameter("error", "Authentication of user '" +userId + "' failed!"); log.error("Authentication of user '" + userId + "' failed!"); return; } if (newPassword != null && !newPassword.equals("")) { if (newPassword.equals(newPasswordConfirmed)) { User user = getRealm().getIdentityManager().getUserManager().getUser(userId); user.setPassword(newPassword); user.save(); setTransformerParameter("success", "Password updated successfully"); } else { setTransformerParameter("error", "New password and its confirmation do not match!"); } } else { setTransformerParameter("error", "No new password was specified!"); } } /** * Update the email 
address (and possibly also the alias) inside user profile * @param email New email address of user (and possibly also alias) * @return true if update was successful and false otherwise */ protected boolean updateProfile(String email) throws Exception { if (email == null || ("").equals(email)) { setTransformerParameter("error", "emailNotSet"); log.warn("No email (or empty email) specified, hence do not update email address!"); return false; } else if (!validateEmail(email)) { setTransformerParameter("error", "emailNotValid"); log.warn("Email '" + email + "' is not valid!"); return false; } else { try { String userId = getUserId(); org.wyona.security.core.api.UserManager userManager = realm.getIdentityManager().getUserManager(); User user = userManager.getUser(userId); user.setName(getEnvironment().getRequest().getParameter("userName")); user.setLanguage(getEnvironment().getRequest().getParameter("user-profile-language")); user.save(); updateSession(user); String previousEmailAddress = user.getEmail(); if (!previousEmailAddress.equals(email)) { user.setEmail(email); user.save(); if (userManager.existsAlias(previousEmailAddress)) { if (!userManager.existsAlias(email)) { userManager.createAlias(email, userId); } else { if (hasAlias(user, email)) { log.warn("DEBUG: User '" + userId + "' already has alias '" + email + "'."); } else { throw new Exception("Alias '" + email + "' already exists, but is not associated with user '" + userId + "'!"); } } if (hasAlias(user, previousEmailAddress)) { userManager.removeAlias(previousEmailAddress); log.warn("Previous alias '" + previousEmailAddress + "' removed, which means user needs to use new email '" + email + "' to login."); sendNotification(previousEmailAddress, email); // TODO/TBD: Logout user and tell user why he/she was signed out } } else { log.warn("Previous email '" + previousEmailAddress + "' was not used as alias, hence we also use new email '" + email + "' not as alias."); } } else { log.warn("DEBUG: Current email and 
new email are the same."); if (!userManager.existsAlias(email)) { log.warn("Email '" + email + "' is not used as alias yet!"); } } setTransformerParameter("success", "E-Mail (and alias) updated successfully"); return true; } catch (Exception e) { log.error(e, e); setTransformerParameter("error", e.getMessage()); return false; } } } /** * Send notifications to previous and new emails that login alias has changed */ private void sendNotification(String previousEmail, String newEmail) throws Exception { String from = getResourceConfigProperty("fromEmail"); if (from != null) { String subject = "[" + getRealm().getName() + "] Username changed"; String body = "Please note that you must use '" + newEmail + "' instead '" + previousEmail + "' to login."; try { MailUtil.send(from, previousEmail, subject, body); } catch(Exception e) { log.error(e, e); } try { MailUtil.send(from, newEmail, subject, body); } catch(Exception e) { log.error(e, e); } } else { log.warn("No 'from' email address inside resource configuration set, hence no notifications about changed username will be sent!"); } } /** * Update identity attached to session */ private void updateSession(User user) throws Exception { YanelServlet.setIdentity(new Identity(user, user.getEmail()), getEnvironment().getRequest().getSession(true), getRealm()); } /** * Check whether user has a specific alias * @return true when user has a specific alias */ private boolean hasAlias(User user, String alias) throws Exception { String[] aliases = user.getAliases(); for (int i = 0; i < aliases.length; i++) { if (aliases[i].equals(alias)) { return true; } } return false; } /** * */ private void setTransformerParameter(String name, String value) { transformerParameterName = name; transformerParameterValue = value; } /** * @see org.wyona.yanel.impl.resources.BasicXMLResource#passTransformerParameters(Transformer) */ @Override protected void passTransformerParameters(javax.xml.transform.Transformer transformer) throws Exception { 
super.passTransformerParameters(transformer); try { if (transformerParameterName != null && transformerParameterValue != null) { transformer.setParameter(transformerParameterName, transformerParameterValue); transformerParameterName = null; transformerParameterValue = null; } } catch (Exception e) { log.error(e, e); } } /** * This method checks if the specified email is valid against a regex * * @param email * @return true if email is valid */ private boolean validateEmail(String email) { String emailRegEx = "(\\w+)@(\\w+\\.)(\\w+)(\\.\\w+)*"; Pattern pattern = Pattern.compile(emailRegEx); Matcher matcher = pattern.matcher(email); return matcher.find(); } }
src/resources/yanel-user/src/java/org/wyona/yanel/impl/resources/yaneluser/EditYanelUserProfileResource.java
/* * Copyright 2010 Wyona */ package org.wyona.yanel.impl.resources.yaneluser; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.util.MailUtil; import org.wyona.yanel.impl.resources.BasicXMLResource; import org.wyona.security.core.api.Identity; import org.wyona.security.core.api.User; import org.wyona.yanel.servlet.YanelServlet; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.wyona.commons.xml.XMLHelper; /** * A resource to edit/update the profile of a user */ public class EditYanelUserProfileResource extends BasicXMLResource { private static Logger log = LogManager.getLogger(EditYanelUserProfileResource.class); private String transformerParameterName; private String transformerParameterValue; private static final String USER_PROP_NAME = "user"; /* * @see org.wyona.yanel.impl.resources.BasicXMLResource#getContentXML(String) */ protected InputStream getContentXML(String viewId) throws Exception { if (log.isDebugEnabled()) { log.debug("requested viewId: " + viewId); } String oldPassword = getEnvironment().getRequest().getParameter("oldPassword"); if (oldPassword != null) { String newPassword = getEnvironment().getRequest().getParameter("newPassword"); String newPasswordConfirmed = getEnvironment().getRequest().getParameter("newPasswordConfirmation"); updatePassword(oldPassword, newPassword, newPasswordConfirmed); } String email = getEnvironment().getRequest().getParameter("email"); boolean emailUpdated = false; if (email != null) { emailUpdated = updateProfile(email); log.info("Email '" + email + "' has been updated: " + emailUpdated); } try { return getXMLAsStream(emailUpdated); } catch(Exception e) { log.error(e, e); return null; } } /** * Get user profile as XML as stream * @param emailUpdated Flag whether 
email got updated successfully */ private java.io.InputStream getXMLAsStream(boolean emailUpdated) throws Exception { String userId = getUserId(); if (userId != null) { Document doc = getUserProfile(userId, emailUpdated); return XMLHelper.getInputStream(doc, false, true, null); } else { return new java.io.StringBufferInputStream("<no-user-id/>"); } } /** * Get user profile as DOM XML * @param userID ID of user * @param emailUpdated Flag whether email got updated successfully */ protected Document getUserProfile(String userId, boolean emailUpdated) throws Exception { User user = realm.getIdentityManager().getUserManager().getUser(userId); Document doc = XMLHelper.createDocument(null, "user"); Element rootEl = doc.getDocumentElement(); rootEl.setAttribute("id", userId); rootEl.setAttribute("email", user.getEmail()); rootEl.setAttribute("lamguage", user.getLanguage()); // DEPRECATED rootEl.setAttribute("language", user.getLanguage()); if (emailUpdated) { rootEl.setAttribute("email-saved-successfully", "true"); } Element nameEl = doc.createElement("name"); nameEl.setTextContent(user.getName()); rootEl.appendChild(nameEl); Element expirationDateEl = doc.createElement("expiration-date"); expirationDateEl.setTextContent("" + user.getExpirationDate()); rootEl.appendChild(expirationDateEl); Element descEl = doc.createElement("description"); descEl.setTextContent("" + user.getDescription()); rootEl.appendChild(descEl); org.wyona.security.core.api.Group[] groups = user.getGroups(); if (groups != null && groups.length > 0) { Element groupsEl = doc.createElement("groups"); rootEl.appendChild(groupsEl); for (int i = 0; i < groups.length; i++) { Element groupEl = doc.createElement("group"); groupEl.setAttribute("id", groups[i].getID()); groupsEl.appendChild(groupEl); } } String[] aliases = user.getAliases(); if (aliases != null && aliases.length > 0) { Element aliasesEl = (Element) rootEl.appendChild(doc.createElement("aliases")); for (int i = 0; i < aliases.length; i++) { 
Element aliasEl = (Element) aliasesEl.appendChild(doc.createElement("alias")); aliasEl.appendChild(doc.createTextNode(aliases[i])); } } else { rootEl.appendChild(doc.createElement("no-aliases")); } org.wyona.security.core.UserHistory history = user.getHistory(); if (history != null) { java.util.List<org.wyona.security.core.UserHistory.HistoryEntry> entries = history.getHistory(); if (entries != null) { for (org.wyona.security.core.UserHistory.HistoryEntry entry: entries) { Element historyEl = (Element) rootEl.appendChild(doc.createElement("history")); Element eventEl = (Element) historyEl.appendChild(doc.createElement("event")); eventEl.setAttribute("usecase", entry.getUsecase()); eventEl.setAttribute("description", entry.getDescription()); eventEl.setAttribute("date", "" + entry.getDate()); } } } return doc; } /** * Get user ID, whereas check various options, such as 1) query string, 2) resource configuration, 3) URL and 4) session */ protected String getUserId() throws Exception { String userId = null; // 1) userId = getEnvironment().getRequest().getParameter("id"); if (userId != null) { return userId; /* if (getRealm().getPolicyManager().authorize("/yanel/users/" + userId + ".html", getEnvironment().getIdentity(), new org.wyona.security.core.api.Usecase("view"))) { // INFO: Because the policymanager has no mean to check (or interpret) query strings we need to recheck programmatically return userId; } else { //throw new Exception("User '" + getEnvironment().getIdentity().getUsername() + "' tries to access user profile '" + userId + "', but is not authorized!"); log.warn("User '" + getEnvironment().getIdentity().getUsername() + "' tries to access user profile '" + userId + "', but is not authorized!"); } */ } else { log.debug("User ID is not part of query string."); } // 2) userId = getResourceConfigProperty(USER_PROP_NAME); if (userId != null) { return userId; } else { log.debug("User ID is not configured inside resource configuration."); } // 3) userId = 
getPath().substring(getPath().lastIndexOf("/") + 1, getPath().lastIndexOf(".html")); if (userId != null && getRealm().getIdentityManager().getUserManager().existsUser(userId)) { return userId; } else { log.debug("Could not retrieve user ID from URL."); } // 4) userId = getEnvironment().getIdentity().getUsername(); if (userId != null) { return userId; } else { log.warn("User does not seem to be signed in!"); } throw new Exception("Cannot retrieve user ID!"); } /** * Change user password * @param oldPassword Existing current password */ protected void updatePassword(String oldPassword, String newPassword, String newPasswordConfirmed) throws Exception { String userId = getUserId(); if (!getRealm().getIdentityManager().getUserManager().getUser(userId).authenticate(oldPassword)) { setTransformerParameter("error", "Authentication of user '" +userId + "' failed!"); log.error("Authentication of user '" + userId + "' failed!"); return; } if (newPassword != null && !newPassword.equals("")) { if (newPassword.equals(newPasswordConfirmed)) { User user = getRealm().getIdentityManager().getUserManager().getUser(userId); user.setPassword(newPassword); user.save(); setTransformerParameter("success", "Password updated successfully"); } else { setTransformerParameter("error", "New password and its confirmation do not match!"); } } else { setTransformerParameter("error", "No new password was specified!"); } } /** * Update the email address (and possibly also the alias) inside user profile * @param email New email address of user (and possibly also alias) * @return true if update was successful and false otherwise */ protected boolean updateProfile(String email) throws Exception { if (email == null || ("").equals(email)) { setTransformerParameter("error", "emailNotSet"); log.warn("No email (or empty email) specified, hence do not update email address!"); return false; } else if (!validateEmail(email)) { setTransformerParameter("error", "emailNotValid"); log.warn("Email '" + email + "' 
is not valid!"); return false; } else { try { String userId = getUserId(); org.wyona.security.core.api.UserManager userManager = realm.getIdentityManager().getUserManager(); User user = userManager.getUser(userId); user.setName(getEnvironment().getRequest().getParameter("userName")); user.setLanguage(getEnvironment().getRequest().getParameter("user-profile-language")); user.save(); updateSession(user); String previousEmailAddress = user.getEmail(); if (!previousEmailAddress.equals(email)) { user.setEmail(email); user.save(); if (userManager.existsAlias(previousEmailAddress)) { if (!userManager.existsAlias(email)) { userManager.createAlias(email, userId); } else { if (hasAlias(user, email)) { log.warn("DEBUG: User '" + userId + "' already has alias '" + email + "'."); } else { throw new Exception("Alias '" + email + "' already exists, but is not associated with user '" + userId + "'!"); } } if (hasAlias(user, previousEmailAddress)) { userManager.removeAlias(previousEmailAddress); log.warn("Previous alias '" + previousEmailAddress + "' removed, which means user needs to use new email '" + email + "' to login."); sendNotification(previousEmailAddress, email); // TODO/TBD: Logout user and tell user why he/she was signed out } } else { log.warn("Previous email '" + previousEmailAddress + "' was not used as alias, hence we also use new email '" + email + "' not as alias."); } } else { log.warn("DEBUG: Current email and new email are the same."); if (!userManager.existsAlias(email)) { log.warn("Email '" + email + "' is not used as alias yet!"); } } setTransformerParameter("success", "E-Mail (and alias) updated successfully"); return true; } catch (Exception e) { log.error(e, e); setTransformerParameter("error", e.getMessage()); return false; } } } /** * Send notifications to previous and new emails that login alias has changed */ private void sendNotification(String previousEmail, String newEmail) throws Exception { String from = getResourceConfigProperty("fromEmail"); if 
(from != null) { String subject = "[" + getRealm().getName() + "] Username changed"; String body = "Please note that you must use '" + newEmail + "' instead '" + previousEmail + "' to login."; try { MailUtil.send(from, previousEmail, subject, body); } catch(Exception e) { log.error(e, e); } try { MailUtil.send(from, newEmail, subject, body); } catch(Exception e) { log.error(e, e); } } else { log.warn("No 'from' email address inside resource configuration set, hence no notifications about changed username will be sent!"); } } /** * Update identity attached to session */ private void updateSession(User user) throws Exception { YanelServlet.setIdentity(new Identity(user, user.getEmail()), getEnvironment().getRequest().getSession(true), getRealm()); } /** * Check whether user has a specific alias * @return true when user has a specific alias */ private boolean hasAlias(User user, String alias) throws Exception { String[] aliases = user.getAliases(); for (int i = 0; i < aliases.length; i++) { if (aliases[i].equals(alias)) { return true; } } return false; } /** * */ private void setTransformerParameter(String name, String value) { transformerParameterName = name; transformerParameterValue = value; } /** * @see org.wyona.yanel.impl.resources.BasicXMLResource#passTransformerParameters(Transformer) */ @Override protected void passTransformerParameters(javax.xml.transform.Transformer transformer) throws Exception { super.passTransformerParameters(transformer); try { if (transformerParameterName != null && transformerParameterValue != null) { transformer.setParameter(transformerParameterName, transformerParameterValue); transformerParameterName = null; transformerParameterValue = null; } } catch (Exception e) { log.error(e, e); } } /** * This method checks if the specified email is valid against a regex * * @param email * @return true if email is valid */ private boolean validateEmail(String email) { String emailRegEx = "(\\w+)@(\\w+\\.)(\\w+)(\\.\\w+)*"; Pattern pattern = 
Pattern.compile(emailRegEx); Matcher matcher = pattern.matcher(email); return matcher.find(); } }
languages supported by realm added
src/resources/yanel-user/src/java/org/wyona/yanel/impl/resources/yaneluser/EditYanelUserProfileResource.java
languages supported by realm added
<ide><path>rc/resources/yanel-user/src/java/org/wyona/yanel/impl/resources/yaneluser/EditYanelUserProfileResource.java <ide> /* <del> * Copyright 2010 Wyona <add> * Copyright 2010 - 2017 Wyona <ide> */ <ide> package org.wyona.yanel.impl.resources.yaneluser; <ide> <ide> Element nameEl = doc.createElement("name"); <ide> nameEl.setTextContent(user.getName()); <ide> rootEl.appendChild(nameEl); <add> <add> Element realmEl = doc.createElement("realm"); <add> rootEl.appendChild(realmEl); <add> String[] languages = getRealm().getLanguages(); <add> if (languages != null && languages.length > 0) { <add> Element supportedLanguagesEl = doc.createElement("languages"); <add> // TODO: Set default language <add> String defaultLanguage = getRealm().getDefaultLanguage(); <add> realmEl.appendChild(supportedLanguagesEl); <add> for (int i = 0; i < languages.length; i++) { <add> Element languageEl = doc.createElement("language"); <add> languageEl.setTextContent(languages[i]); <add> supportedLanguagesEl.appendChild(languageEl); <add> } <add> } <ide> <ide> Element expirationDateEl = doc.createElement("expiration-date"); <ide> expirationDateEl.setTextContent("" + user.getExpirationDate());
Java
apache-2.0
145530bc3fae3cb03acddc607b8f647f7fff1a26
0
edgarRd/incubator-tinkerpop,robertdale/tinkerpop,vtslab/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,rmagen/incubator-tinkerpop,n-tran/incubator-tinkerpop,apache/incubator-tinkerpop,BrynCooke/incubator-tinkerpop,samiunn/incubator-tinkerpop,n-tran/incubator-tinkerpop,jorgebay/tinkerpop,velo/incubator-tinkerpop,rmagen/incubator-tinkerpop,samiunn/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,jorgebay/tinkerpop,samiunn/incubator-tinkerpop,PommeVerte/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,BrynCooke/incubator-tinkerpop,edgarRd/incubator-tinkerpop,robertdale/tinkerpop,krlohnes/tinkerpop,robertdale/tinkerpop,mike-tr-adamson/incubator-tinkerpop,newkek/incubator-tinkerpop,apache/tinkerpop,rmagen/incubator-tinkerpop,robertdale/tinkerpop,pluradj/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,pluradj/incubator-tinkerpop,krlohnes/tinkerpop,krlohnes/tinkerpop,RussellSpitzer/incubator-tinkerpop,edgarRd/incubator-tinkerpop,apache/tinkerpop,RussellSpitzer/incubator-tinkerpop,apache/tinkerpop,jorgebay/tinkerpop,PommeVerte/incubator-tinkerpop,jorgebay/tinkerpop,BrynCooke/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,dalaro/incubator-tinkerpop,newkek/incubator-tinkerpop,apache/tinkerpop,newkek/incubator-tinkerpop,apache/tinkerpop,artem-aliev/tinkerpop,artem-aliev/tinkerpop,apache/incubator-tinkerpop,krlohnes/tinkerpop,apache/incubator-tinkerpop,artem-aliev/tinkerpop,vtslab/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,PommeVerte/incubator-tinkerpop,dalaro/incubator-tinkerpop,velo/incubator-tinkerpop,vtslab/incubator-tinkerpop,pluradj/incubator-tinkerpop,artem-aliev/tinkerpop,velo/incubator-tinkerpop,apache/tinkerpop,n-tran/incubator-tinkerpop,krlohnes/tinkerpop,artem-aliev/tinkerpop,RussellSpitzer/incubator-tinkerpop,apache/tinkerpop,dalaro/incubator-tinkerpop,robertdale/tinkerpop
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.server; import org.apache.tinkerpop.gremlin.driver.Client; import org.apache.tinkerpop.gremlin.driver.Cluster; import org.apache.tinkerpop.gremlin.driver.ResultSet; import org.apache.tinkerpop.gremlin.process.traversal.Path; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedEdge; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedPath; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedProperty; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.Arrays; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; /** * @author Stephen Mallette 
(http://stephen.genoprime.com) */ public class GremlinResultSetIntegrateTest extends AbstractGremlinServerIntegrationTest { private Cluster cluster; private Client client; @Override public Settings overrideSettings(final Settings settings) { settings.scriptEngines.get("gremlin-groovy").scripts = Arrays.asList("scripts/generate-modern.groovy"); return settings; } @Before public void beforeTest() { cluster = Cluster.open(); client = cluster.connect(); } @After public void afterTest() { cluster.close(); } @Test public void shouldHandleNullResult() throws Exception { final ResultSet results = client.submit("g.V().drop().iterate();null"); assertNull(results.all().get().get(0).getObject()); } @Test public void shouldHandleEmptyResult() throws Exception { final ResultSet results = client.submit("g.V(100,1000,1000)"); assertEquals(0, results.all().get().size()); } @Test public void shouldHandleVertexResult() throws Exception { final ResultSet results = client.submit("g.V().next()"); final Vertex v = results.all().get().get(0).getVertex(); assertThat(v, instanceOf(DetachedVertex.class)); } @Test public void shouldHandleVertexPropertyResult() throws Exception { final ResultSet results = client.submit("g.V().properties('name').next()"); final VertexProperty<String> v = results.all().get().get(0).getVertexProperty(); assertThat(v, instanceOf(DetachedVertexProperty.class)); } @Test public void shouldHandleEdgeResult() throws Exception { final ResultSet results = client.submit("g.E().next()"); final Edge e = results.all().get().get(0).getEdge(); assertThat(e, instanceOf(DetachedEdge.class)); } @Test public void shouldHandlePropertyResult() throws Exception { final ResultSet results = client.submit("g.E().properties('weight').next()"); final Property<Double> p = results.all().get().get(0).getProperty(); assertThat(p, instanceOf(DetachedProperty.class)); } @Test public void shouldHandlePathResult() throws Exception { final ResultSet results = client.submit("g.V().out().path()"); 
final Path p = results.all().get().get(0).getPath(); assertThat(p, instanceOf(DetachedPath.class)); } }
gremlin-server/src/test/java/org/apache/tinkerpop/gremlin/server/GremlinResultSetIntegrateTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.server; import org.apache.tinkerpop.gremlin.driver.Client; import org.apache.tinkerpop.gremlin.driver.Cluster; import org.apache.tinkerpop.gremlin.driver.ResultSet; import org.apache.tinkerpop.gremlin.process.traversal.Path; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedEdge; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedPath; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedProperty; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.Arrays; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; /** * @author Stephen Mallette 
(http://stephen.genoprime.com) */ public class GremlinResultSetIntegrateTest extends AbstractGremlinServerIntegrationTest { private Cluster cluster; private Client client; @Override public Settings overrideSettings(final Settings settings) { settings.scriptEngines.get("gremlin-groovy").scripts = Arrays.asList("scripts/generate-modern.groovy"); return settings; } @Before public void beforeTest() { cluster = Cluster.open(); client = cluster.connect(); } @After public void afterTest() { cluster.close(); } @Test public void shouldHandleNullResult() throws Exception { final ResultSet results = client.submit("g.V().drop().iterate();null"); assertNull(results.all().get().get(0).getObject()); } @Test public void shouldHandleEmptyResult() throws Exception { final ResultSet results = client.submit("g.V(100,1000,1000)"); assertEquals(0, results.all().get().size()); } @Test public void shouldHandleVertexResult() throws Exception { final ResultSet results = client.submit("g.V().next()"); final Vertex v = results.all().get().get(0).getVertex(); assertThat(v, instanceOf(DetachedVertex.class)); } @Test public void shouldHandleVertexPropertyResult() throws Exception { final ResultSet results = client.submit("g.V().properties('name').next()"); final VertexProperty<String> v = results.all().get().get(0).getVertexProperty(); assertThat(v, instanceOf(DetachedVertexProperty.class)); } @Test public void shouldHandleEdgeResult() throws Exception { final ResultSet results = client.submit("g.E().next()"); final Edge e = results.all().get().get(0).getEdge(); assertThat(e, instanceOf(DetachedEdge.class)); } @Test public void shouldHandlePropertyResult() throws Exception { final ResultSet results = client.submit("g.E().properties('weight').next()"); final Property<Double> p = results.all().get().get(0).getProperty(); assertThat(p, instanceOf(DetachedProperty.class)); } @Test public void shouldHandlePathResult() throws Exception { final ResultSet results = 
client.submit("g.V().out().path().next()"); final Path p = results.all().get().get(0).getPath(); assertThat(p, instanceOf(DetachedPath.class)); } }
Now that Path implements Iterable Gremlin Server iterates it out. Altered a failing test to just return all paths so as to validate that a DetachedPath was being returned.
gremlin-server/src/test/java/org/apache/tinkerpop/gremlin/server/GremlinResultSetIntegrateTest.java
Now that Path implements Iterable Gremlin Server iterates it out.
<ide><path>remlin-server/src/test/java/org/apache/tinkerpop/gremlin/server/GremlinResultSetIntegrateTest.java <ide> <ide> @Test <ide> public void shouldHandlePathResult() throws Exception { <del> final ResultSet results = client.submit("g.V().out().path().next()"); <add> final ResultSet results = client.submit("g.V().out().path()"); <ide> final Path p = results.all().get().get(0).getPath(); <ide> assertThat(p, instanceOf(DetachedPath.class)); <ide> }
Java
epl-1.0
239fad21150b0695e0e4a6491ecee5092a039235
0
sebasbaumh/portfolio,buchen/portfolio,sebasbaumh/portfolio,buchen/portfolio,sebasbaumh/portfolio,sebasbaumh/portfolio,buchen/portfolio,buchen/portfolio
package name.abuchen.portfolio.ui.views; import java.text.DecimalFormat; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.Period; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.EnumSet; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IAction; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.ToolBarManager; import org.eclipse.swt.SWT; import org.eclipse.swt.events.PaintListener; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.swtchart.IAxis; import org.swtchart.ILegend; import org.swtchart.ILineSeries; import org.swtchart.ILineSeries.PlotSymbolType; import org.swtchart.ISeries; import org.swtchart.ISeries.SeriesType; import org.swtchart.LineStyle; import org.swtchart.Range; import com.google.common.primitives.Doubles; import com.ibm.icu.text.MessageFormat; import name.abuchen.portfolio.model.AccountTransaction; import name.abuchen.portfolio.model.AttributeType; import name.abuchen.portfolio.model.Client; import name.abuchen.portfolio.model.LimitPrice; import name.abuchen.portfolio.model.PortfolioTransaction; import name.abuchen.portfolio.model.Security; import name.abuchen.portfolio.model.SecurityEvent; import name.abuchen.portfolio.model.SecurityPrice; import name.abuchen.portfolio.model.Transaction; import name.abuchen.portfolio.model.Transaction.Unit; import 
name.abuchen.portfolio.model.TransactionPair; import name.abuchen.portfolio.money.CurrencyConverter; import name.abuchen.portfolio.money.Money; import name.abuchen.portfolio.money.Values; import name.abuchen.portfolio.snapshot.ClientSnapshot; import name.abuchen.portfolio.snapshot.filter.ClientSecurityFilter; import name.abuchen.portfolio.snapshot.filter.ReadOnlyClient; import name.abuchen.portfolio.snapshot.security.SecurityPerformanceIndicator; import name.abuchen.portfolio.snapshot.security.SecurityPerformanceSnapshot; import name.abuchen.portfolio.ui.Images; import name.abuchen.portfolio.ui.Messages; import name.abuchen.portfolio.ui.util.Colors; import name.abuchen.portfolio.ui.util.DropDown; import name.abuchen.portfolio.ui.util.SimpleAction; import name.abuchen.portfolio.ui.util.chart.TimelineChart; import name.abuchen.portfolio.ui.util.chart.TimelineChartToolTip; import name.abuchen.portfolio.util.FormatHelper; import name.abuchen.portfolio.util.Interval; import name.abuchen.portfolio.util.TradeCalendar; import name.abuchen.portfolio.util.TradeCalendarManager; /** * Chart of historical quotes for a given security */ public class SecuritiesChart { /** * A <em>closed</em> interval that includes start and end date. * <p/> * We create a separate {@code ChartInterval} class because - for historical * reasons - the {@link Interval} is half-open, i.e. it does not include the * start date. When working with charts, however, it is easier to work with * a closed interval that includes both start and end date. 
*/ public static class ChartInterval { private final LocalDate start; private final LocalDate end; public ChartInterval(LocalDate start, LocalDate end) { this.start = start; this.end = end; } public LocalDate getStart() { return start; } public LocalDate getEnd() { return end; } public boolean contains(LocalDate other) { return !other.isBefore(start) && !other.isAfter(end); } public boolean contains(LocalDateTime other) { return contains(other.toLocalDate()); } } public enum IntervalOption { M1(Messages.SecurityTabChart1M, Messages.SecurityTabChart1MToolTip), // M2(Messages.SecurityTabChart2M, Messages.SecurityTabChart2MToolTip), // M6(Messages.SecurityTabChart6M, Messages.SecurityTabChart6MToolTip), // Y1(Messages.SecurityTabChart1Y, Messages.SecurityTabChart1YToolTip), // Y2(Messages.SecurityTabChart2Y, Messages.SecurityTabChart2YToolTip), // Y3(Messages.SecurityTabChart3Y, Messages.SecurityTabChart3YToolTip), // Y5(Messages.SecurityTabChart5Y, Messages.SecurityTabChart5YToolTip), // Y10(Messages.SecurityTabChart10Y, Messages.SecurityTabChart10YToolTip), // YTD(Messages.SecurityTabChartYTD, Messages.SecurityTabChartYTDToolTip), // H(Messages.SecurityTabChartHoldingPeriod, Messages.SecurityTabChartHoldingPeriodToolTip), // ALL(Messages.SecurityTabChartAll, Messages.SecurityTabChartAllToolTip); private final String label; private final String tooltip; private IntervalOption(String label, String tooltip) { this.label = label; this.tooltip = tooltip; } public String getLabel() { return label; } public String getTooltip() { return tooltip; } public ChartInterval getInverval(Client client, CurrencyConverter converter, Security security) { LocalDate now = LocalDate.now(); switch (this) { case M1: return new ChartInterval(now.minus(Period.ofMonths(1)), now); case M2: return new ChartInterval(now.minus(Period.ofMonths(2)), now); case M6: return new ChartInterval(now.minus(Period.ofMonths(6)), now); case Y1: return new ChartInterval(now.minus(Period.ofYears(1)), now); case 
Y2: return new ChartInterval(now.minus(Period.ofYears(2)), now); case Y3: return new ChartInterval(now.minus(Period.ofYears(3)), now); case Y5: return new ChartInterval(now.minus(Period.ofYears(5)), now); case Y10: return new ChartInterval(now.minus(Period.ofYears(10)), now); case YTD: return new ChartInterval(now.minus(Period.ofDays(now.getDayOfYear() - 1)), now); case H: List<TransactionPair<?>> tx = security.getTransactions(client); if (tx.isEmpty()) return new ChartInterval(now, now); Collections.sort(tx, new TransactionPair.ByDate()); boolean hasHoldings = ClientSnapshot.create(client, converter, LocalDate.now()) .getPositionsByVehicle().containsKey(security); return new ChartInterval(tx.get(0).getTransaction().getDateTime().toLocalDate(), hasHoldings ? LocalDate.now() : tx.get(tx.size() - 1).getTransaction().getDateTime().toLocalDate()); case ALL: List<SecurityPrice> prices = security.getPricesIncludingLatest(); if (prices.isEmpty()) return new ChartInterval(now, now); else return new ChartInterval(prices.get(0).getDate(), prices.get(prices.size() - 1).getDate()); default: throw new IllegalArgumentException(); } } } private enum ChartDetails { SCALING_LINEAR(Messages.LabelChartDetailChartScalingLinear), // SCALING_LOG(Messages.LabelChartDetailChartScalingLog), // CLOSING(Messages.LabelChartDetailChartDevelopmentClosing), // PURCHASEPRICE(Messages.LabelChartDetailChartDevelopmentClosingFIFO), // INVESTMENT(Messages.LabelChartDetailMarkerInvestments), // DIVIDENDS(Messages.LabelChartDetailMarkerDividends), // EVENTS(Messages.LabelChartDetailMarkerSplits), // EXTREMES(Messages.LabelChartDetailMarkerHighLow), // FIFOPURCHASE(Messages.LabelChartDetailMarkerPurchaseFIFO), // FLOATINGAVGPURCHASE(Messages.LabelChartDetailMarkerPurchaseMovingAverage), // BOLLINGERBANDS(Messages.LabelChartDetailIndicatorBollingerBands), // SMA_5DAYS(Messages.LabelChartDetailMovingAverage_5days), // SMA_20DAYS(Messages.LabelChartDetailMovingAverage_20days), // 
SMA_30DAYS(Messages.LabelChartDetailMovingAverage_30days), // SMA_38DAYS(Messages.LabelChartDetailMovingAverage_38days), // SMA_50DAYS(Messages.LabelChartDetailMovingAverage_50days), // SMA_90DAYS(Messages.LabelChartDetailMovingAverage_90days), // SMA_100DAYS(Messages.LabelChartDetailMovingAverage_100days), // SMA_200DAYS(Messages.LabelChartDetailMovingAverage_200days), // EMA_5DAYS(Messages.LabelChartDetailMovingAverage_5days), // EMA_20DAYS(Messages.LabelChartDetailMovingAverage_20days), // EMA_30DAYS(Messages.LabelChartDetailMovingAverage_30days), // EMA_38DAYS(Messages.LabelChartDetailMovingAverage_38days), // EMA_50DAYS(Messages.LabelChartDetailMovingAverage_50days), // EMA_90DAYS(Messages.LabelChartDetailMovingAverage_90days), // EMA_100DAYS(Messages.LabelChartDetailMovingAverage_100days), // EMA_200DAYS(Messages.LabelChartDetailMovingAverage_200days), // SHOW_MARKER_LINES(Messages.LabelChartDetailSettingsShowMarkerLines), // SHOW_DATA_LABELS(Messages.LabelChartDetailSettingsShowDataLabel), // SHOW_MISSING_TRADING_DAYS(Messages.LabelChartDetailSettingsShowMissingTradingDays), // SHOW_LIMITS(Messages.LabelChartDetailSettingsShowLimits); private final String label; private ChartDetails(String label) { this.label = label; } @Override public String toString() { return label; } } /* testing */ static class ChartRange { public final int start; public final int size; public final LocalDate startDate; public final LocalDate endDate; public ChartRange(int start, int end, LocalDate startDate, LocalDate endDate) { this.start = start; this.size = end - start; this.startDate = startDate; this.endDate = endDate; } /** * Maps the given {@link ChartInterval} to a range in the list of * security prices. Returns null if the interval does not intersect with * the list of prices. 
*/ public static ChartRange createFor(List<SecurityPrice> prices, ChartInterval chartInterval) { int start = Collections.binarySearch(prices, new SecurityPrice(chartInterval.getStart(), 0), new SecurityPrice.ByDate()); if (start < 0) start = -start - 1; if (start >= prices.size()) return null; int end = Collections.binarySearch(prices, new SecurityPrice(chartInterval.getEnd(), 0), new SecurityPrice.ByDate()); if (end < 0) end = -end - 1; else end += 1; // include the entry that has been found if (end <= start) return null; return new ChartRange(start, end, prices.get(start).getDate(), prices.get(Math.min(end, prices.size() - 1)).getDate()); } } private Color colorQuote; private static final Color colorEventPurchase = Colors.getColor(26, 173, 33); private static final Color colorEventSale = Colors.getColor(232, 51, 69); private static final Color colorEventDividend = Colors.getColor(128, 0, 128); private static final Color colorHigh = Colors.getColor(0, 102, 0); private static final Color colorLow = Colors.getColor(128, 0, 0); private static final Color colorFifoPurchasePrice = Colors.getColor(226, 122, 121); private static final Color colorMovingAveragePurchasePrice = Colors.getColor(150, 82, 81); private static final Color colorBollingerBands = Colors.getColor(201, 141, 68); private static final Color colorSMA1 = Colors.getColor(179, 107, 107); // #B36B6B private static final Color colorSMA2 = Colors.getColor(179, 167, 107); // #B3A76B private static final Color colorSMA3 = Colors.getColor(131, 179, 107); // #83B36B private static final Color colorSMA4 = Colors.getColor(107, 179, 143); // #6BB38F private static final Color colorSMA5 = Colors.getColor(107, 155, 179); // #6B9BB3 private static final Color colorSMA6 = Colors.getColor(119, 107, 179); // #776BB3 private static final Color colorSMA7 = Colors.getColor(179, 107, 179); // #B36BB3 private static final Color colorEMA1 = Colors.getColor(200, 107, 107); // #C86B6B private static final Color colorEMA2 = 
Colors.getColor(200, 167, 107); // #C8A76B private static final Color colorEMA3 = Colors.getColor(131, 200, 107); // #83C86B private static final Color colorEMA4 = Colors.getColor(107, 200, 143); // #6BC88F private static final Color colorEMA5 = Colors.getColor(107, 155, 200); // #6B9BC8 private static final Color colorEMA6 = Colors.getColor(119, 107, 200); // #776BC8 private static final Color colorEMA7 = Colors.getColor(200, 107, 200); // #C86BB3 private static final Color colorAreaPositive = Colors.getColor(90, 114, 226); private static final Color colorAreaNegative = Colors.getColor(226, 91, 90); private static final Color colorNonTradingDay = Colors.getColor(255, 137, 89); private static final String PREF_KEY = "security-chart-details"; //$NON-NLS-1$ private DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("d LLL"); //$NON-NLS-1$ private Composite container; private Client client; private CurrencyConverter converter; private Security security; private TimelineChart chart; /** * Calculates dynamically for each security the interval of security prices * to be shown. 
*/ private IntervalOption intervalOption = IntervalOption.Y2; private EnumSet<ChartDetails> chartConfig = EnumSet.of(ChartDetails.INVESTMENT, ChartDetails.EVENTS, ChartDetails.SCALING_LINEAR); private List<PaintListener> customPaintListeners = new ArrayList<>(); private List<PaintListener> customBehindPaintListener = new ArrayList<>(); private List<Transaction> customTooltipEvents = new ArrayList<>(); private int swtAntialias = SWT.ON; public SecuritiesChart(Composite parent, Client client, CurrencyConverter converter) { this.client = client; this.converter = converter; readChartConfig(client); container = new Composite(parent, SWT.NONE); container.setLayout(new FillLayout()); chart = new TimelineChart(container); chart.getTitle().setVisible(false); chart.getPlotArea().addPaintListener(event -> customPaintListeners.forEach(l -> l.paintControl(event))); chart.getPlotArea().addPaintListener(event -> customBehindPaintListener.forEach(l -> l.paintControl(event))); setupTooltip(); ILegend legend = chart.getLegend(); legend.setPosition(SWT.BOTTOM); legend.setVisible(true); } public IntervalOption getIntervalOption() { return intervalOption; } public void setIntervalOption(IntervalOption intervalOption) { this.intervalOption = intervalOption; } public void setQuoteColor(Color color) { this.colorQuote = color; } private void setupTooltip() { TimelineChartToolTip toolTip = chart.getToolTip(); toolTip.showToolTipOnlyForDatesInDataSeries(Messages.ColumnQuote); toolTip.setDefaultValueFormat(new DecimalFormat(Values.Quote.pattern())); toolTip.addSeriesExclude(Messages.LabelChartDetailChartDevelopment + "Positive"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailChartDevelopment + "Negative"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailChartDevelopment + "Zero"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.SecurityMenuBuy); toolTip.addSeriesExclude(Messages.SecurityMenuBuy + "1"); //$NON-NLS-1$ 
toolTip.addSeriesExclude(Messages.SecurityMenuBuy + "2"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.SecurityMenuSell); toolTip.addSeriesExclude(Messages.SecurityMenuSell + "1"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.SecurityMenuSell + "2"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailMarkerDividends); toolTip.addSeriesExclude(Messages.LabelChartDetailMarkerDividends + "1"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailMarkerDividends + "2"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailIndicatorBollingerBands); int precision = FormatHelper.getCalculatedQuoteDisplayPrecision(); DecimalFormat calculatedFormat = new DecimalFormat(Values.CalculatedQuote.pattern()); calculatedFormat.setMinimumFractionDigits(precision); calculatedFormat.setMaximumFractionDigits(precision); for (String period : new String[] { Messages.LabelChartDetailMovingAverage_5days, Messages.LabelChartDetailMovingAverage_20days, Messages.LabelChartDetailMovingAverage_30days, Messages.LabelChartDetailMovingAverage_38days, Messages.LabelChartDetailMovingAverage_50days, Messages.LabelChartDetailMovingAverage_100days, Messages.LabelChartDetailMovingAverage_200days, }) { toolTip.overrideValueFormat(String.format("%s (%s)", Messages.LabelChartDetailMovingAverageEMA, period), //$NON-NLS-1$ calculatedFormat); toolTip.overrideValueFormat(String.format("%s (%s)", Messages.LabelChartDetailMovingAverageSMA, period), //$NON-NLS-1$ calculatedFormat); } toolTip.overrideValueFormat(Messages.LabelChartDetailIndicatorBollingerBandsLower, calculatedFormat); toolTip.overrideValueFormat(Messages.LabelChartDetailIndicatorBollingerBandsUpper, calculatedFormat); toolTip.overrideValueFormat(Messages.LabelChartDetailMarkerPurchaseFIFO, calculatedFormat); toolTip.overrideValueFormat(Messages.LabelChartDetailMarkerPurchaseMovingAverage, calculatedFormat); toolTip.addExtraInfo((composite, focus) -> { if (focus instanceof Date) { Instant instant = ((Date) 
focus).toInstant(); ZonedDateTime zdt = instant.atZone(ZoneId.systemDefault()); LocalDate date = zdt.toLocalDate(); Interval displayInterval = Interval.of(date.minusDays(5), date.plusDays(5)); customTooltipEvents.stream() // .filter(t -> displayInterval.contains(t.getDateTime())) // .forEach(t -> { if (t instanceof AccountTransaction) addDividendTooltip(composite, (AccountTransaction) t); else if (t instanceof PortfolioTransaction) addInvestmentTooltip(composite, (PortfolioTransaction) t); }); } }); } private void addInvestmentTooltip(Composite composite, PortfolioTransaction t) { Label label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipTransactionSummary, t.getType().toString(), dateTimeFormatter.format(t.getDateTime().toLocalDate()), t.getMonetaryAmount().toString())); label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipInvestmentDetails, Values.Share.format(t.getShares()), Values.Quote.format( t.getGrossPricePerShare(converter.with(t.getSecurity().getCurrencyCode()))))); } private void addDividendTooltip(Composite composite, AccountTransaction t) { Label label = new Label(composite, SWT.NONE); String amount = t.getMonetaryAmount().toString(); label.setText(MessageFormat.format(Messages.LabelToolTipTransactionSummary, t.getType().toString(), dateTimeFormatter.format(t.getDateTime().toLocalDate()), amount)); if (t.getShares() == 0L) { label = new Label(composite, SWT.NONE); label.setText("\u2211 " + t.getGrossValue().toString()); //$NON-NLS-1$ } else { Optional<Unit> grossValue = t.getUnit(Unit.Type.GROSS_VALUE); long gross = grossValue.isPresent() ? grossValue.get().getForex().getAmount() : t.getGrossValueAmount(); String currency = grossValue.isPresent() ? 
grossValue.get().getForex().getCurrencyCode() : t.getCurrencyCode(); // gross value in either forex currency or transaction currency String grossAmount = Money.of(currency, gross).toString(); // gross value in transaction currency String grossValueAmount = Money.of(t.getCurrencyCode(), t.getGrossValueAmount()).toString(); // display gross value in transaction currency, different gross // value in security currency exists if (!grossValueAmount.equals(grossAmount)) { label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipDividendDetailsGross, grossValueAmount)); } // display gross value, if different to net amount if (!grossAmount.equals(amount)) { label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipDividendDetailsGross, grossAmount)); } // display dividend per share in security currency label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipDividendDetails, Values.Share.format(t.getShares()), currency, Values.Quote.format(Math.round(gross * Values.Share.divider() * Values.Quote.factorToMoney() / t.getShares())))); } } private void configureSeriesPainter(ILineSeries series, Date[] dates, double[] values, Color color, int lineWidth, LineStyle lineStyle, boolean enableArea, boolean visibleInLegend) { if (lineWidth != 0) series.setLineWidth(lineWidth); series.setLineStyle(lineStyle); series.setXDateSeries(dates); series.enableArea(enableArea); series.setYSeries(values); series.setAntialias(swtAntialias); if (color != null) series.setLineColor(color); series.setVisibleInLegend(visibleInLegend); } private final void readChartConfig(Client client) { String pref = ReadOnlyClient.unwrap(client).getProperty(PREF_KEY); if (pref == null) return; chartConfig.clear(); for (String key : pref.split(",")) //$NON-NLS-1$ { try { chartConfig.add(ChartDetails.valueOf(key)); } catch (IllegalArgumentException ignore) { // do not print exception to the log as it 
confuses users. The // old SMA200 label has been renamed, nothing we can change // anymore } } } public void addButtons(ToolBarManager toolBar) { List<Action> viewActions = new ArrayList<>(); for (IntervalOption option : IntervalOption.values()) { SimpleAction action = new SimpleAction(option.getLabel(), IAction.AS_CHECK_BOX, option.getTooltip(), a -> { this.intervalOption = option; updateChart(); for (Action viewAction : viewActions) viewAction.setChecked(a.equals(viewAction)); }); if (intervalOption == option) action.setChecked(true); viewActions.add(action); toolBar.add(action); } toolBar.add(new DropDown(Messages.MenuConfigureChart, Images.CONFIG, SWT.NONE, this::chartConfigAboutToShow)); } private void chartConfigAboutToShow(IMenuManager manager) { MenuManager subMenuChartScaling = new MenuManager(Messages.LabelChartDetailChartScaling, null); MenuManager subMenuChartDevelopment = new MenuManager(Messages.LabelChartDetailChartDevelopment, null); MenuManager subMenuChartMarker = new MenuManager(Messages.LabelChartDetailMarker, null); MenuManager subMenuChartIndicator = new MenuManager(Messages.LabelChartDetailIndicator, null); MenuManager subMenuChartMovingAverage = new MenuManager(Messages.LabelChartDetailMovingAverage, null); MenuManager subMenuChartMovingAverageSMA = new MenuManager(Messages.LabelChartDetailMovingAverageSMA, null); MenuManager subMenuChartMovingAverageEMA = new MenuManager(Messages.LabelChartDetailMovingAverageEMA, null); MenuManager subMenuChartSettings = new MenuManager(Messages.LabelChartDetailSettings, null); subMenuChartScaling.add(addMenuAction(ChartDetails.SCALING_LINEAR)); subMenuChartScaling.add(addMenuAction(ChartDetails.SCALING_LOG)); subMenuChartDevelopment.add(addMenuAction(ChartDetails.CLOSING)); subMenuChartDevelopment.add(addMenuAction(ChartDetails.PURCHASEPRICE)); subMenuChartMarker.add(addMenuAction(ChartDetails.INVESTMENT)); subMenuChartMarker.add(addMenuAction(ChartDetails.DIVIDENDS)); 
subMenuChartMarker.add(addMenuAction(ChartDetails.EVENTS)); subMenuChartMarker.add(addMenuAction(ChartDetails.EXTREMES)); subMenuChartMarker.add(addMenuAction(ChartDetails.FIFOPURCHASE)); subMenuChartMarker.add(addMenuAction(ChartDetails.FLOATINGAVGPURCHASE)); subMenuChartMarker.add(addMenuAction(ChartDetails.SHOW_LIMITS)); subMenuChartIndicator.add(addMenuAction(ChartDetails.BOLLINGERBANDS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_5DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_20DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_30DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_38DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_50DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_90DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_100DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_200DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_5DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_20DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_30DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_38DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_50DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_90DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_100DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_200DAYS)); subMenuChartSettings.add(addMenuAction(ChartDetails.SHOW_MARKER_LINES)); subMenuChartSettings.add(addMenuAction(ChartDetails.SHOW_DATA_LABELS)); subMenuChartSettings.add(addMenuAction(ChartDetails.SHOW_MISSING_TRADING_DAYS)); manager.add(subMenuChartScaling); manager.add(subMenuChartDevelopment); manager.add(subMenuChartMarker); manager.add(subMenuChartIndicator); manager.add(subMenuChartMovingAverage); 
subMenuChartMovingAverage.add(subMenuChartMovingAverageSMA); subMenuChartMovingAverage.add(subMenuChartMovingAverageEMA); manager.add(subMenuChartSettings); } private Action addMenuAction(ChartDetails detail) { Action action = new SimpleAction(detail.toString(), a -> { boolean isActive = chartConfig.contains(detail); if (isActive) chartConfig.remove(detail); else chartConfig.add(detail); if (!isActive) { switch (detail) { case SCALING_LINEAR: chartConfig.remove(ChartDetails.SCALING_LOG); break; case SCALING_LOG: chartConfig.remove(ChartDetails.SCALING_LINEAR); chartConfig.remove(ChartDetails.PURCHASEPRICE); chartConfig.remove(ChartDetails.CLOSING); break; case CLOSING: chartConfig.remove(ChartDetails.PURCHASEPRICE); chartConfig.remove(ChartDetails.SCALING_LOG); break; case PURCHASEPRICE: chartConfig.remove(ChartDetails.CLOSING); chartConfig.remove(ChartDetails.SCALING_LOG); break; default: break; } } if (!chartConfig.contains(ChartDetails.SCALING_LINEAR) && !chartConfig.contains(ChartDetails.SCALING_LOG)) chartConfig.add(ChartDetails.SCALING_LINEAR); ReadOnlyClient.unwrap(client).setProperty(PREF_KEY, String.join(",", //$NON-NLS-1$ chartConfig.stream().map(ChartDetails::name).collect(Collectors.toList()))); updateChart(); }); action.setChecked(chartConfig.contains(detail)); return action; } public void updateChart(Client client, Security security) { this.client = client; this.security = security; updateChart(); } public Control getControl() { return container; } private void updateChart() { chart.setRedraw(false); try { // delete all line series (quotes + possibly moving average) ISeries[] series = chart.getSeriesSet().getSeries(); for (ISeries s : series) chart.getSeriesSet().deleteSeries(s.getId()); chart.clearMarkerLines(); chart.clearNonTradingDayMarker(); customPaintListeners.clear(); customBehindPaintListener.clear(); customTooltipEvents.clear(); if (security == null || security.getPrices().isEmpty()) { chart.redraw(); return; } boolean 
showAreaRelativeToFirstQuote = chartConfig.contains(ChartDetails.CLOSING) || chartConfig.contains(ChartDetails.PURCHASEPRICE); // determine the interval to be shown in the chart ChartInterval chartInterval = intervalOption.getInverval(client, converter, security); // determine index range for given interval in prices list List<SecurityPrice> prices = security.getPricesIncludingLatest(); ChartRange range = ChartRange.createFor(prices, chartInterval); if (range == null) { chart.redraw(); return; } // prepare value arrays LocalDate[] dates = new LocalDate[range.size]; double[] values = new double[range.size]; double[] valuesRelative = new double[range.size]; double[] valuesRelativePositive = new double[range.size]; double[] valuesRelativeNegative = new double[range.size]; double[] valuesZeroLine = new double[range.size]; double firstQuote = 0; // Disable SWT antialias for more than 1000 records due to SWT // performance issue in Drawing swtAntialias = range.size > 1000 ? SWT.OFF : SWT.ON; if (!chartConfig.contains(ChartDetails.PURCHASEPRICE)) { SecurityPrice p2 = prices.get(range.start); firstQuote = (p2.getValue() / Values.Quote.divider()); } else { Optional<Double> purchasePrice = getLatestPurchasePrice(); if (purchasePrice.isPresent()) firstQuote = purchasePrice.get(); else showAreaRelativeToFirstQuote = false; } addChartMarkerBackground(chartInterval, range); for (int ii = 0; ii < range.size; ii++) { SecurityPrice p = prices.get(ii + range.start); dates[ii] = p.getDate(); values[ii] = p.getValue() / Values.Quote.divider(); if (showAreaRelativeToFirstQuote) { valuesRelative[ii] = (p.getValue() / Values.Quote.divider()) - firstQuote; valuesZeroLine[ii] = 0; if (valuesRelative[ii] >= 0) { valuesRelativePositive[ii] = valuesRelative[ii]; valuesRelativeNegative[ii] = 0; } else { valuesRelativePositive[ii] = 0; valuesRelativeNegative[ii] = valuesRelative[ii]; } } } Date[] javaDates = TimelineChart.toJavaUtilDate(dates); if (showAreaRelativeToFirstQuote) { ILineSeries 
lineSeries2ndNegative = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailChartDevelopmentClosing + "Negative"); //$NON-NLS-1$ lineSeries2ndNegative.setSymbolType(PlotSymbolType.NONE); lineSeries2ndNegative.setYAxisId(1); configureSeriesPainter(lineSeries2ndNegative, javaDates, valuesRelativeNegative, colorAreaNegative, 1, LineStyle.SOLID, true, false); ILineSeries lineSeries2ndPositive = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailChartDevelopmentClosing + "Positive"); //$NON-NLS-1$ lineSeries2ndPositive.setSymbolType(PlotSymbolType.NONE); lineSeries2ndPositive.setYAxisId(1); configureSeriesPainter(lineSeries2ndPositive, javaDates, valuesRelativePositive, colorAreaPositive, 1, LineStyle.SOLID, true, false); } ILineSeries lineSeries = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.ColumnQuote); lineSeries.setSymbolType(PlotSymbolType.NONE); configureSeriesPainter(lineSeries, javaDates, values, colorQuote, 2, LineStyle.SOLID, !showAreaRelativeToFirstQuote, false); chart.adjustRange(); addChartMarkerForeground(chartInterval); chart.adjustRange(); IAxis yAxis1st = chart.getAxisSet().getYAxis(0); IAxis yAxis2nd = chart.getAxisSet().getYAxis(1); yAxis2nd.setRange( new Range(yAxis1st.getRange().lower - firstQuote, yAxis1st.getRange().upper - firstQuote)); yAxis1st.enableLogScale(chartConfig.contains(ChartDetails.SCALING_LOG)); yAxis2nd.enableLogScale(chartConfig.contains(ChartDetails.SCALING_LOG)); yAxis1st.getTick().setVisible(true); if (chartConfig.contains(ChartDetails.SHOW_MISSING_TRADING_DAYS)) { TradeCalendar tradeCalendar = TradeCalendarManager.getInstance(security); List<LocalDate> calendarDates = new ArrayList<>(); for (LocalDate calendarDate = dates[0]; calendarDate .isBefore(dates[dates.length - 1]); calendarDate = calendarDate.plusDays(1)) calendarDates.add(calendarDate); for (LocalDate pricingDate : dates) calendarDates.remove(pricingDate); for 
(LocalDate targetDate : calendarDates) { if (!tradeCalendar.isHoliday(targetDate)) chart.addNonTradingDayMarker(targetDate, colorNonTradingDay); } } } finally { chart.setRedraw(true); chart.redraw(); } } private void addChartMarkerBackground(ChartInterval chartInterval, ChartRange range) { if (chartConfig.contains(ChartDetails.BOLLINGERBANDS)) addBollingerBandsMarkerLines(chartInterval, 20, 2); if (chartConfig.contains(ChartDetails.SMA_5DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_5days, 5, colorSMA1); if (chartConfig.contains(ChartDetails.SMA_20DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_20days, 20, colorSMA2); if (chartConfig.contains(ChartDetails.SMA_30DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_30days, 30, colorSMA3); if (chartConfig.contains(ChartDetails.SMA_38DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_38days, 38, colorSMA4); if (chartConfig.contains(ChartDetails.SMA_50DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_50days, 50, colorSMA4); if (chartConfig.contains(ChartDetails.SMA_90DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_90days, 90, colorSMA5); if (chartConfig.contains(ChartDetails.SMA_100DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_100days, 100, colorSMA6); if (chartConfig.contains(ChartDetails.SMA_200DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_200days, 200, colorSMA7); if (chartConfig.contains(ChartDetails.EMA_5DAYS)) 
addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_5days, 5, colorEMA1); if (chartConfig.contains(ChartDetails.EMA_20DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_20days, 20, colorEMA2); if (chartConfig.contains(ChartDetails.EMA_30DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_30days, 30, colorEMA3); if (chartConfig.contains(ChartDetails.EMA_38DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_38days, 38, colorEMA4); if (chartConfig.contains(ChartDetails.EMA_50DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_50days, 50, colorEMA4); if (chartConfig.contains(ChartDetails.EMA_90DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_90days, 90, colorEMA5); if (chartConfig.contains(ChartDetails.EMA_100DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_100days, 100, colorEMA6); if (chartConfig.contains(ChartDetails.EMA_200DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_200days, 200, colorEMA7); if (chartConfig.contains(ChartDetails.SHOW_LIMITS)) addLimitLines(chartInterval, range); } private void addChartMarkerForeground(ChartInterval chartInterval) { if (chartConfig.contains(ChartDetails.FIFOPURCHASE)) addFIFOPurchasePrice(chartInterval); if (chartConfig.contains(ChartDetails.FLOATINGAVGPURCHASE)) addMovingAveragePurchasePrice(chartInterval); if (chartConfig.contains(ChartDetails.INVESTMENT)) addInvestmentMarkerLines(chartInterval); if (chartConfig.contains(ChartDetails.DIVIDENDS)) 
addDividendMarkerLines(chartInterval); if (chartConfig.contains(ChartDetails.EVENTS)) addEventMarkerLines(chartInterval); if (chartConfig.contains(ChartDetails.EXTREMES)) addExtremesMarkerLines(chartInterval); } private void addLimitLines(ChartInterval chartInterval, ChartRange range) { this.security.getAttributes().getMap().forEach((key, val) -> { // null OR not Limit Price --> ignore if (val == null || val.getClass() != LimitPrice.class) return; LimitPrice limitAttribute = (LimitPrice) val; Optional<AttributeType> attributeName = ReadOnlyClient.unwrap(client) // unwrap because ReadOnlyClient only contains/provides default attributes .getSettings().getAttributeTypes() .filter(attr -> attr.getId().equals(key)).findFirst(); // could not find name of limit attribute --> don't draw if (attributeName.isEmpty()) return; String lineID = attributeName.get().getName() + " (" + limitAttribute.toString() + ")"; //$NON-NLS-1$ //$NON-NLS-2$ // horizontal line: only two points required LocalDate[] dates = new LocalDate[2]; dates[0] = range.startDate; dates[1] = range.endDate; // both points with same y-value double[] values = new double[2]; values[0] = values[1] = limitAttribute.getValue() / Values.Quote.divider(); ILineSeries lineSeriesLimit = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, lineID); lineSeriesLimit.setXDateSeries(TimelineChart.toJavaUtilDate(dates)); lineSeriesLimit.setLineWidth(2); lineSeriesLimit.setLineStyle(LineStyle.DASH); lineSeriesLimit.enableArea(false); lineSeriesLimit.setSymbolType(PlotSymbolType.NONE); lineSeriesLimit.setYSeries(values); lineSeriesLimit.setAntialias(swtAntialias); lineSeriesLimit.setLineColor(Colors.ICON_ORANGE); lineSeriesLimit.setYAxisId(0); lineSeriesLimit.setVisibleInLegend(true); }); } private void addSMAMarkerLines(ChartInterval chartInterval, String smaSeries, String smaDaysWording, int smaDays, Color smaColor) { ChartLineSeriesAxes smaLines = new SimpleMovingAverage(smaDays, this.security, 
chartInterval).getSMA(); if (smaLines == null || smaLines.getValues() == null || smaLines.getDates() == null) return; @SuppressWarnings("nls") String lineID = smaSeries + " (" + smaDaysWording + ")"; ILineSeries lineSeriesSMA = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, lineID); lineSeriesSMA.setXDateSeries(smaLines.getDates()); lineSeriesSMA.setLineWidth(2); lineSeriesSMA.enableArea(false); lineSeriesSMA.setSymbolType(PlotSymbolType.NONE); lineSeriesSMA.setYSeries(smaLines.getValues()); lineSeriesSMA.setAntialias(swtAntialias); lineSeriesSMA.setLineColor(smaColor); lineSeriesSMA.setYAxisId(0); lineSeriesSMA.setVisibleInLegend(true); } private void addEMAMarkerLines(ChartInterval chartInterval, String emaSeries, String emaDaysWording, int emaDays, Color emaColor) { ChartLineSeriesAxes emaLines = new ExponentialMovingAverage(emaDays, this.security, chartInterval).getEMA(); if (emaLines == null || emaLines.getValues() == null || emaLines.getDates() == null) return; @SuppressWarnings("nls") String lineID = emaSeries + " (" + emaDaysWording + ")"; ILineSeries lineSeriesEMA = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, lineID); lineSeriesEMA.setXDateSeries(emaLines.getDates()); lineSeriesEMA.setLineWidth(2); lineSeriesEMA.enableArea(false); lineSeriesEMA.setSymbolType(PlotSymbolType.NONE); lineSeriesEMA.setYSeries(emaLines.getValues()); lineSeriesEMA.setAntialias(swtAntialias); lineSeriesEMA.setLineColor(emaColor); lineSeriesEMA.setYAxisId(0); lineSeriesEMA.setVisibleInLegend(true); } private void addInvestmentMarkerLines(ChartInterval chartInterval) { List<PortfolioTransaction> purchase = client.getPortfolios().stream().flatMap(p -> p.getTransactions().stream()) .filter(t -> t.getSecurity() == security) .filter(t -> t.getType() == PortfolioTransaction.Type.BUY || t.getType() == PortfolioTransaction.Type.DELIVERY_INBOUND) .filter(t -> chartInterval.contains(t.getDateTime())) // .sorted(new 
Transaction.ByDate()).collect(Collectors.toList()); addInvestmentMarkers(purchase, Messages.SecurityMenuBuy, colorEventPurchase); List<PortfolioTransaction> sales = client.getPortfolios().stream().flatMap(p -> p.getTransactions().stream()) .filter(t -> t.getSecurity() == security) .filter(t -> t.getType() == PortfolioTransaction.Type.SELL || t.getType() == PortfolioTransaction.Type.DELIVERY_OUTBOUND) .filter(t -> chartInterval.contains(t.getDateTime())) // .sorted(new Transaction.ByDate()).collect(Collectors.toList()); addInvestmentMarkers(sales, Messages.SecurityMenuSell, colorEventSale); } private void addInvestmentMarkers(List<PortfolioTransaction> transactions, String seriesLabel, Color color) { if (transactions.isEmpty()) return; customTooltipEvents.addAll(transactions); if (chartConfig.contains(ChartDetails.SHOW_MARKER_LINES)) { transactions.forEach(t -> { String label = Values.Share.format(t.getType().isPurchase() ? t.getShares() : -t.getShares()); double value = t.getGrossPricePerShare(converter.with(t.getSecurity().getCurrencyCode())).getAmount() / Values.Quote.divider(); chart.addMarkerLine(t.getDateTime().toLocalDate(), color, label, value); }); } else { Date[] dates = transactions.stream().map(PortfolioTransaction::getDateTime) .map(d -> Date.from(d.atZone(ZoneId.systemDefault()).toInstant())) .collect(Collectors.toList()).toArray(new Date[0]); double[] values = transactions.stream().mapToDouble( t -> t.getGrossPricePerShare(converter.with(t.getSecurity().getCurrencyCode())).getAmount() / Values.Quote.divider()) .toArray(); ILineSeries border = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, seriesLabel + "2"); //$NON-NLS-1$ border.setYAxisId(0); border.setSymbolColor(Display.getDefault().getSystemColor(SWT.COLOR_BLACK)); border.setSymbolType(PlotSymbolType.DIAMOND); border.setSymbolSize(7); configureSeriesPainter(border, dates, values, null, 0, LineStyle.NONE, false, false); ILineSeries background = (ILineSeries) 
chart.getSeriesSet().createSeries(SeriesType.LINE, seriesLabel + "1"); //$NON-NLS-1$ background.setYAxisId(0); background.setSymbolType(PlotSymbolType.DIAMOND); background.setSymbolSize(6); background.setSymbolColor(Display.getDefault().getSystemColor(SWT.COLOR_WHITE)); configureSeriesPainter(background, dates, values, null, 0, LineStyle.NONE, false, false); ILineSeries inner = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, seriesLabel); inner.setYAxisId(0); inner.setSymbolType(PlotSymbolType.DIAMOND); inner.setSymbolSize(4); inner.setSymbolColor(color); configureSeriesPainter(inner, dates, values, color, 0, LineStyle.NONE, false, true); if (chartConfig.contains(ChartDetails.SHOW_DATA_LABELS)) { customPaintListeners.add(event -> { IAxis xAxis = chart.getAxisSet().getXAxis(0); IAxis yAxis = chart.getAxisSet().getYAxis(0); for (int index = 0; index < dates.length; index++) { int x = xAxis.getPixelCoordinate(dates[index].getTime()); int y = yAxis.getPixelCoordinate(values[index]); PortfolioTransaction t = transactions.get(index); String label = Values.Share.format(t.getType().isPurchase() ? 
t.getShares() : -t.getShares()); Point textExtent = event.gc.textExtent(label); event.gc.setForeground(Colors.theme().defaultForeground()); event.gc.drawText(label, x - (textExtent.x / 2), y + border.getSymbolSize(), true); } }); } } } private void addDividendMarkerLines(ChartInterval chartInterval) { List<AccountTransaction> dividends = client.getAccounts().stream().flatMap(a -> a.getTransactions().stream()) .filter(t -> t.getSecurity() == security) .filter(t -> t.getType() == AccountTransaction.Type.DIVIDENDS) .filter(t -> chartInterval.contains(t.getDateTime())).sorted(new Transaction.ByDate()) .collect(Collectors.toList()); if (dividends.isEmpty()) return; customTooltipEvents.addAll(dividends); if (chartConfig.contains(ChartDetails.SHOW_MARKER_LINES)) { dividends.forEach(t -> chart.addMarkerLine(t.getDateTime().toLocalDate(), colorEventDividend, getDividendLabel(t))); } else { Date[] dates = dividends.stream().map(AccountTransaction::getDateTime) .map(d -> Date.from(d.atZone(ZoneId.systemDefault()).toInstant())) .collect(Collectors.toList()).toArray(new Date[0]); IAxis yAxis1st = chart.getAxisSet().getYAxis(0); double yAxis1stAxisPrice = yAxis1st.getRange().lower; double[] values = new double[dates.length]; Arrays.fill(values, yAxis1stAxisPrice); ILineSeries border = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailMarkerDividends + "2"); //$NON-NLS-1$ border.setYAxisId(0); border.setSymbolType(PlotSymbolType.SQUARE); border.setSymbolSize(6); border.setSymbolColor(Display.getDefault().getSystemColor(SWT.COLOR_BLACK)); configureSeriesPainter(border, dates, values, null, 0, LineStyle.NONE, false, false); ILineSeries background = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailMarkerDividends + "1"); //$NON-NLS-1$ background.setYAxisId(0); background.setSymbolType(PlotSymbolType.SQUARE); background.setSymbolSize(5); 
background.setSymbolColor(Display.getDefault().getSystemColor(SWT.COLOR_WHITE)); configureSeriesPainter(background, dates, values, null, 0, LineStyle.NONE, false, false); ILineSeries inner = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailMarkerDividends); inner.setYAxisId(0); inner.setSymbolType(PlotSymbolType.SQUARE); inner.setSymbolSize(3); inner.setSymbolColor(colorEventDividend); configureSeriesPainter(inner, dates, values, null, 0, LineStyle.NONE, false, true); if (chartConfig.contains(ChartDetails.SHOW_DATA_LABELS)) { customPaintListeners.add(event -> { IAxis xAxis = chart.getAxisSet().getXAxis(0); IAxis yAxis = chart.getAxisSet().getYAxis(0); int yPosLabel = 0; int lastWriteLabelLevel1 = 0; int lastWriteLabelLevel2 = 0; int lastWriteLabelLevel3 = 0; for (int index = 0; index < dates.length; index++) { boolean freeSpaceForLabelLevel1 = true; boolean freeSpaceForLabelLevel2 = true; boolean freeSpaceForLabelLevel3 = true; int x = xAxis.getPixelCoordinate(dates[index].getTime()); int y = yAxis.getPixelCoordinate(values[index]); String label = getDividendLabel(dividends.get(index)); Point textExtent = event.gc.textExtent(label); event.gc.setForeground(Colors.theme().defaultForeground()); if (((x - (textExtent.x / 2)) - lastWriteLabelLevel1) <= 0) freeSpaceForLabelLevel1 = false; if (((x - (textExtent.x / 2)) - lastWriteLabelLevel2) <= 0) freeSpaceForLabelLevel2 = false; if (((x - (textExtent.x / 2)) - lastWriteLabelLevel3) <= 0) freeSpaceForLabelLevel3 = false; if (freeSpaceForLabelLevel1 || freeSpaceForLabelLevel2 || freeSpaceForLabelLevel3) { if (freeSpaceForLabelLevel1) { yPosLabel = y - textExtent.y - border.getSymbolSize(); lastWriteLabelLevel1 = (x + (textExtent.x / 2)); } if (freeSpaceForLabelLevel2 && !freeSpaceForLabelLevel1) { yPosLabel = yPosLabel - textExtent.y; lastWriteLabelLevel2 = (x + (textExtent.x / 2)); } if (freeSpaceForLabelLevel3 && !freeSpaceForLabelLevel2 && !freeSpaceForLabelLevel1) { yPosLabel = 
yPosLabel - textExtent.y; lastWriteLabelLevel3 = (x + (textExtent.x / 2)); } event.gc.drawText(label, x - (textExtent.x / 2), yPosLabel, true); } } }); } } } private String getDividendLabel(AccountTransaction t) { if (t.getShares() == 0L) { return "\u2211 " + t.getGrossValue().toString(); //$NON-NLS-1$ } else { Optional<Unit> grossValue = t.getUnit(Unit.Type.GROSS_VALUE); long gross = grossValue.isPresent() ? grossValue.get().getForex().getAmount() : t.getGrossValueAmount(); long perShare = Math.round(gross * Values.Share.divider() * Values.Quote.factorToMoney() / t.getShares()); return Values.Quote.format(perShare); } } private void addEventMarkerLines(ChartInterval chartInterval) { security.getEvents().stream() // .filter(e -> chartInterval.contains(e.getDate())) // .filter(e -> e.getType() != SecurityEvent.Type.DIVIDEND_PAYMENT) // .forEach(e -> chart.addMarkerLine(e.getDate(), Display.getDefault().getSystemColor(SWT.COLOR_DARK_GRAY), e.getDetails())); } private void addExtremesMarkerLines(ChartInterval chartInterval) { Optional<SecurityPrice> max = security.getPricesIncludingLatest().stream() // .filter(p -> chartInterval.contains(p.getDate())) // .max(Comparator.comparing(SecurityPrice::getValue)); Optional<SecurityPrice> min = security.getPricesIncludingLatest().stream() // .filter(p -> chartInterval.contains(p.getDate())) // .min(Comparator.comparing(SecurityPrice::getValue)); max.ifPresent(high -> addExtremeMarker(high, PlotSymbolType.TRIANGLE, // Messages.LabelChartDetailMarkerHigh, colorHigh)); min.ifPresent(low -> addExtremeMarker(low, PlotSymbolType.INVERTED_TRIANGLE, // Messages.LabelChartDetailMarkerLow, colorLow)); } private void addExtremeMarker(SecurityPrice price, PlotSymbolType plotSymbolType, String seriesLabel, Color color) { LocalDate eventDate = price.getDate(); String valueFormat = Values.Quote.format(price.getValue()); double value = price.getValue() / Values.Quote.divider(); if (chartConfig.contains(ChartDetails.SHOW_MARKER_LINES)) { 
chart.addMarkerLine(eventDate, color, valueFormat); } else { Date zonedDate = Date.from(eventDate.atStartOfDay(ZoneId.systemDefault()).toInstant()); ILineSeries inner = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, seriesLabel); inner.setYAxisId(0); inner.setSymbolType(plotSymbolType); inner.setSymbolSize(6); inner.setSymbolColor(color); configureSeriesPainter(inner, new Date[] { zonedDate }, new double[] { value }, color, 0, LineStyle.NONE, false, true); if (chartConfig.contains(ChartDetails.SHOW_DATA_LABELS)) { customPaintListeners.add(event -> { IAxis xAxis = chart.getAxisSet().getXAxis(0); IAxis yAxis = chart.getAxisSet().getYAxis(0); int x = xAxis.getPixelCoordinate(zonedDate.getTime()); int y = yAxis.getPixelCoordinate(value); Point textExtent = event.gc.textExtent(valueFormat); event.gc.setForeground(Colors.theme().defaultForeground()); if (inner.getSymbolColor() == colorHigh) y = y - textExtent.y - inner.getSymbolSize(); else y = y + inner.getSymbolSize(); event.gc.drawText(valueFormat, x - (textExtent.x / 2), y, true); }); } } } private void addBollingerBandsMarkerLines(ChartInterval chartInterval, int bollingerBandsDays, double bollingerBandsFactor) { BollingerBands bands = new BollingerBands(bollingerBandsDays, bollingerBandsFactor, this.security, chartInterval); ChartLineSeriesAxes lowerBand = bands.getLowerBand(); if (lowerBand == null || lowerBand.getValues() == null || lowerBand.getDates() == null) return; ILineSeries lineSeriesBollingerBandsLowerBand = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailIndicatorBollingerBandsLower); lineSeriesBollingerBandsLowerBand.setXDateSeries(lowerBand.getDates()); lineSeriesBollingerBandsLowerBand.setLineStyle(LineStyle.SOLID); lineSeriesBollingerBandsLowerBand.setLineWidth(2); lineSeriesBollingerBandsLowerBand.setSymbolType(PlotSymbolType.NONE); lineSeriesBollingerBandsLowerBand.setYSeries(lowerBand.getValues()); 
lineSeriesBollingerBandsLowerBand.setAntialias(swtAntialias); lineSeriesBollingerBandsLowerBand.setLineColor(colorBollingerBands); lineSeriesBollingerBandsLowerBand.setYAxisId(0); lineSeriesBollingerBandsLowerBand.setVisibleInLegend(false); ChartLineSeriesAxes middleBand = bands.getMiddleBand(); ILineSeries lineSeriesBollingerBandsMiddleBand = (ILineSeries) chart.getSeriesSet() .createSeries(SeriesType.LINE, Messages.LabelChartDetailIndicatorBollingerBands); lineSeriesBollingerBandsMiddleBand.setXDateSeries(middleBand.getDates()); lineSeriesBollingerBandsMiddleBand.setLineWidth(2); lineSeriesBollingerBandsMiddleBand.setLineStyle(LineStyle.DOT); lineSeriesBollingerBandsMiddleBand.setSymbolType(PlotSymbolType.NONE); lineSeriesBollingerBandsMiddleBand.setYSeries(middleBand.getValues()); lineSeriesBollingerBandsMiddleBand.setAntialias(swtAntialias); lineSeriesBollingerBandsMiddleBand.setLineColor(colorBollingerBands); lineSeriesBollingerBandsMiddleBand.setYAxisId(0); lineSeriesBollingerBandsMiddleBand.setVisibleInLegend(true); ChartLineSeriesAxes upperBand = bands.getUpperBand(); ILineSeries lineSeriesBollingerBandsUpperBand = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailIndicatorBollingerBandsUpper); lineSeriesBollingerBandsUpperBand.setXDateSeries(upperBand.getDates()); lineSeriesBollingerBandsUpperBand.setLineWidth(2); lineSeriesBollingerBandsUpperBand.setLineStyle(LineStyle.SOLID); lineSeriesBollingerBandsUpperBand.setSymbolType(PlotSymbolType.NONE); lineSeriesBollingerBandsUpperBand.setYSeries(upperBand.getValues()); lineSeriesBollingerBandsUpperBand.setAntialias(swtAntialias); lineSeriesBollingerBandsUpperBand.setLineColor(colorBollingerBands); lineSeriesBollingerBandsUpperBand.setYAxisId(0); lineSeriesBollingerBandsUpperBand.setVisibleInLegend(false); } private void addFIFOPurchasePrice(ChartInterval chartInterval) { // securities w/o currency (e.g. 
index) cannot be bought and hence have // no purchase price if (security.getCurrencyCode() == null) return; // create a list of dates that are relevant for FIFO purchase price // changes (i.e. all purchase and sell events) Client filteredClient = new ClientSecurityFilter(security).filter(client); CurrencyConverter securityCurrency = converter.with(security.getCurrencyCode()); List<LocalDate> candidates = client.getPortfolios().stream() // .flatMap(p -> p.getTransactions().stream()) // .filter(t -> t.getSecurity().equals(security)) .filter(t -> !(t.getType() == PortfolioTransaction.Type.TRANSFER_IN || t.getType() == PortfolioTransaction.Type.TRANSFER_OUT)) .filter(t -> !t.getDateTime().toLocalDate().isAfter(chartInterval.getEnd())) .map(t -> chartInterval.contains(t.getDateTime()) ? t.getDateTime().toLocalDate() : chartInterval.getStart()) .distinct() // .sorted() // .collect(Collectors.toList()); // calculate FIFO purchase price for each event - separate lineSeries // per holding period List<Double> values = new ArrayList<>(); List<LocalDate> dates = new ArrayList<>(); int seriesCounter = 0; for (LocalDate eventDate : candidates) { Optional<Double> purchasePrice = getPurchasePrice(filteredClient, securityCurrency, eventDate); if (purchasePrice.isPresent()) { dates.add(eventDate); values.add(purchasePrice.get()); } else { if (!dates.isEmpty()) { // add previous value if the data series ends here (no more // future events) dates.add(eventDate); values.add(values.get(values.size() - 1)); createFIFOPurchaseLineSeries(values, dates, seriesCounter++); values.clear(); dates.clear(); } else if (dates.isEmpty()) { // if no holding period exists, then do not add the event at // all } } } // add today if needed getPurchasePrice(filteredClient, securityCurrency, chartInterval.getEnd()).ifPresent(price -> { dates.add(chartInterval.getEnd()); values.add(price); }); if (!dates.isEmpty()) createFIFOPurchaseLineSeries(values, dates, seriesCounter); } private void 
createFIFOPurchaseLineSeries(List<Double> values, List<LocalDate> dates, int seriesCounter) { String label = seriesCounter == 0 ? Messages.LabelChartDetailMarkerPurchaseFIFO : MessageFormat.format(Messages.LabelChartDetailMarkerPurchaseFIFOHoldingPeriod, seriesCounter + 1); ILineSeries series = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, label); series.setSymbolType(PlotSymbolType.NONE); series.setYAxisId(0); series.enableStep(true); configureSeriesPainter(series, TimelineChart.toJavaUtilDate(dates.toArray(new LocalDate[0])), Doubles.toArray(values), colorFifoPurchasePrice, 2, LineStyle.SOLID, false, seriesCounter == 0); } private void addMovingAveragePurchasePrice(ChartInterval chartInterval) { // securities w/o currency (e.g. index) cannot be bought and hence have // no purchase price if (security.getCurrencyCode() == null) return; // create a list of dates that are relevant for floating avg purchase // price // changes (i.e. all purchase and sell events) Client filteredClient = new ClientSecurityFilter(security).filter(client); CurrencyConverter securityCurrency = converter.with(security.getCurrencyCode()); List<LocalDate> candidates = client.getPortfolios().stream() // .flatMap(p -> p.getTransactions().stream()) // .filter(t -> t.getSecurity().equals(security)) .filter(t -> !(t.getType() == PortfolioTransaction.Type.TRANSFER_IN || t.getType() == PortfolioTransaction.Type.TRANSFER_OUT)) .filter(t -> !t.getDateTime().toLocalDate().isAfter(chartInterval.getEnd())) .map(t -> chartInterval.contains(t.getDateTime()) ? 
t.getDateTime().toLocalDate() : chartInterval.getStart()) .distinct() // .sorted() // .collect(Collectors.toList()); // calculate floating avg purchase price for each event - separate // lineSeries // per holding period List<Double> values = new ArrayList<>(); List<LocalDate> dates = new ArrayList<>(); int seriesCounter = 0; for (LocalDate eventDate : candidates) { Optional<Double> purchasePrice = getMovingAveragePurchasePrice(filteredClient, securityCurrency, eventDate); if (purchasePrice.isPresent()) { dates.add(eventDate); values.add(purchasePrice.get()); } else { if (!dates.isEmpty()) { // add previous value if the data series ends here (no more // future events) dates.add(eventDate); values.add(values.get(values.size() - 1)); createMovingAveragePurchaseLineSeries(values, dates, seriesCounter++); values.clear(); dates.clear(); } else if (dates.isEmpty()) { // if no holding period exists, then do not add the event at // all } } } // add today if needed getMovingAveragePurchasePrice(filteredClient, securityCurrency, chartInterval.getEnd()).ifPresent(price -> { dates.add(chartInterval.getEnd()); values.add(price); }); if (!dates.isEmpty()) createMovingAveragePurchaseLineSeries(values, dates, seriesCounter); } private void createMovingAveragePurchaseLineSeries(List<Double> values, List<LocalDate> dates, int seriesCounter) { String label = seriesCounter == 0 ? 
Messages.LabelChartDetailMarkerPurchaseMovingAverage : MessageFormat.format(Messages.LabelChartDetailMarkerPurchaseMovingAverageHoldingPeriod, seriesCounter + 1); ILineSeries series = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, label); series.setSymbolType(PlotSymbolType.NONE); series.setYAxisId(0); series.enableStep(true); configureSeriesPainter(series, TimelineChart.toJavaUtilDate(dates.toArray(new LocalDate[0])), Doubles.toArray(values), colorMovingAveragePurchasePrice, 2, LineStyle.SOLID, false, seriesCounter == 0); } private Optional<Double> getLatestPurchasePrice() { // securities w/o currency (e.g. index) cannot be bought and hence have // no purchase price if (security.getCurrencyCode() == null) return Optional.empty(); return getPurchasePrice(new ClientSecurityFilter(security).filter(client), converter.with(security.getCurrencyCode()), LocalDate.now()); } private Optional<Double> getPurchasePrice(Client filteredClient, CurrencyConverter currencyConverter, LocalDate date) { return SecurityPerformanceSnapshot .create(filteredClient, currencyConverter, Interval.of(LocalDate.MIN, date), SecurityPerformanceIndicator.Costs.class) .getRecord(security) // .filter(r -> !r.getFifoCostPerSharesHeld().isZero()) // .map(r -> r.getFifoCostPerSharesHeld().getAmount() / Values.Quote.divider()); } private Optional<Double> getMovingAveragePurchasePrice(Client filteredClient, CurrencyConverter currencyConverter, LocalDate date) { return SecurityPerformanceSnapshot .create(filteredClient, currencyConverter, Interval.of(LocalDate.MIN, date), SecurityPerformanceIndicator.Costs.class) .getRecord(security) // .filter(r -> !r.getFifoCostPerSharesHeld().isZero()) // .map(r -> r.getMovingAverageCostPerSharesHeld().getAmount() / Values.Quote.divider()); } }
name.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/views/SecuritiesChart.java
package name.abuchen.portfolio.ui.views; import java.text.DecimalFormat; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.Period; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.EnumSet; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IAction; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.ToolBarManager; import org.eclipse.swt.SWT; import org.eclipse.swt.events.PaintListener; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.swtchart.IAxis; import org.swtchart.ILegend; import org.swtchart.ILineSeries; import org.swtchart.ILineSeries.PlotSymbolType; import org.swtchart.ISeries; import org.swtchart.ISeries.SeriesType; import org.swtchart.LineStyle; import org.swtchart.Range; import com.google.common.primitives.Doubles; import com.ibm.icu.text.MessageFormat; import name.abuchen.portfolio.model.AccountTransaction; import name.abuchen.portfolio.model.AttributeType; import name.abuchen.portfolio.model.Client; import name.abuchen.portfolio.model.LimitPrice; import name.abuchen.portfolio.model.PortfolioTransaction; import name.abuchen.portfolio.model.Security; import name.abuchen.portfolio.model.SecurityEvent; import name.abuchen.portfolio.model.SecurityPrice; import name.abuchen.portfolio.model.Transaction; import name.abuchen.portfolio.model.Transaction.Unit; import 
name.abuchen.portfolio.model.TransactionPair; import name.abuchen.portfolio.money.CurrencyConverter; import name.abuchen.portfolio.money.Money; import name.abuchen.portfolio.money.Values; import name.abuchen.portfolio.snapshot.ClientSnapshot; import name.abuchen.portfolio.snapshot.filter.ClientSecurityFilter; import name.abuchen.portfolio.snapshot.filter.ReadOnlyClient; import name.abuchen.portfolio.snapshot.security.SecurityPerformanceIndicator; import name.abuchen.portfolio.snapshot.security.SecurityPerformanceSnapshot; import name.abuchen.portfolio.ui.Images; import name.abuchen.portfolio.ui.Messages; import name.abuchen.portfolio.ui.util.Colors; import name.abuchen.portfolio.ui.util.DropDown; import name.abuchen.portfolio.ui.util.SimpleAction; import name.abuchen.portfolio.ui.util.chart.TimelineChart; import name.abuchen.portfolio.ui.util.chart.TimelineChartToolTip; import name.abuchen.portfolio.util.FormatHelper; import name.abuchen.portfolio.util.Interval; import name.abuchen.portfolio.util.TradeCalendar; import name.abuchen.portfolio.util.TradeCalendarManager; /** * Chart of historical quotes for a given security */ public class SecuritiesChart { /** * A <em>closed</em> interval that includes start and end date. * <p/> * We create a separate {@code ChartInterval} class because - for historical * reasons - the {@link Interval} is half-open, i.e. it does not include the * start date. When working with charts, however, it is easier to work with * a closed interval that includes both start and end date. 
*/ public static class ChartInterval { private final LocalDate start; private final LocalDate end; public ChartInterval(LocalDate start, LocalDate end) { this.start = start; this.end = end; } public LocalDate getStart() { return start; } public LocalDate getEnd() { return end; } public boolean contains(LocalDate other) { return !other.isBefore(start) && !other.isAfter(end); } public boolean contains(LocalDateTime other) { return contains(other.toLocalDate()); } } public enum IntervalOption { M1(Messages.SecurityTabChart1M, Messages.SecurityTabChart1MToolTip), // M2(Messages.SecurityTabChart2M, Messages.SecurityTabChart2MToolTip), // M6(Messages.SecurityTabChart6M, Messages.SecurityTabChart6MToolTip), // Y1(Messages.SecurityTabChart1Y, Messages.SecurityTabChart1YToolTip), // Y2(Messages.SecurityTabChart2Y, Messages.SecurityTabChart2YToolTip), // Y3(Messages.SecurityTabChart3Y, Messages.SecurityTabChart3YToolTip), // Y5(Messages.SecurityTabChart5Y, Messages.SecurityTabChart5YToolTip), // Y10(Messages.SecurityTabChart10Y, Messages.SecurityTabChart10YToolTip), // YTD(Messages.SecurityTabChartYTD, Messages.SecurityTabChartYTDToolTip), // H(Messages.SecurityTabChartHoldingPeriod, Messages.SecurityTabChartHoldingPeriodToolTip), // ALL(Messages.SecurityTabChartAll, Messages.SecurityTabChartAllToolTip); private final String label; private final String tooltip; private IntervalOption(String label, String tooltip) { this.label = label; this.tooltip = tooltip; } public String getLabel() { return label; } public String getTooltip() { return tooltip; } public ChartInterval getInverval(Client client, CurrencyConverter converter, Security security) { LocalDate now = LocalDate.now(); switch (this) { case M1: return new ChartInterval(now.minus(Period.ofMonths(1)), now); case M2: return new ChartInterval(now.minus(Period.ofMonths(2)), now); case M6: return new ChartInterval(now.minus(Period.ofMonths(6)), now); case Y1: return new ChartInterval(now.minus(Period.ofYears(1)), now); case 
Y2: return new ChartInterval(now.minus(Period.ofYears(2)), now); case Y3: return new ChartInterval(now.minus(Period.ofYears(3)), now); case Y5: return new ChartInterval(now.minus(Period.ofYears(5)), now); case Y10: return new ChartInterval(now.minus(Period.ofYears(10)), now); case YTD: return new ChartInterval(now.minus(Period.ofDays(now.getDayOfYear() - 1)), now); case H: List<TransactionPair<?>> tx = security.getTransactions(client); if (tx.isEmpty()) return new ChartInterval(now, now); Collections.sort(tx, new TransactionPair.ByDate()); boolean hasHoldings = ClientSnapshot.create(client, converter, LocalDate.now()) .getPositionsByVehicle().containsKey(security); return new ChartInterval(tx.get(0).getTransaction().getDateTime().toLocalDate(), hasHoldings ? LocalDate.now() : tx.get(tx.size() - 1).getTransaction().getDateTime().toLocalDate()); case ALL: List<SecurityPrice> prices = security.getPricesIncludingLatest(); if (prices.isEmpty()) return new ChartInterval(now, now); else return new ChartInterval(prices.get(0).getDate(), prices.get(prices.size() - 1).getDate()); default: throw new IllegalArgumentException(); } } } private enum ChartDetails { SCALING_LINEAR(Messages.LabelChartDetailChartScalingLinear), // SCALING_LOG(Messages.LabelChartDetailChartScalingLog), // CLOSING(Messages.LabelChartDetailChartDevelopmentClosing), // PURCHASEPRICE(Messages.LabelChartDetailChartDevelopmentClosingFIFO), // INVESTMENT(Messages.LabelChartDetailMarkerInvestments), // DIVIDENDS(Messages.LabelChartDetailMarkerDividends), // EVENTS(Messages.LabelChartDetailMarkerSplits), // EXTREMES(Messages.LabelChartDetailMarkerHighLow), // FIFOPURCHASE(Messages.LabelChartDetailMarkerPurchaseFIFO), // FLOATINGAVGPURCHASE(Messages.LabelChartDetailMarkerPurchaseMovingAverage), // BOLLINGERBANDS(Messages.LabelChartDetailIndicatorBollingerBands), // SMA_5DAYS(Messages.LabelChartDetailMovingAverage_5days), // SMA_20DAYS(Messages.LabelChartDetailMovingAverage_20days), // 
SMA_30DAYS(Messages.LabelChartDetailMovingAverage_30days), // SMA_38DAYS(Messages.LabelChartDetailMovingAverage_38days), // SMA_50DAYS(Messages.LabelChartDetailMovingAverage_50days), // SMA_90DAYS(Messages.LabelChartDetailMovingAverage_90days), // SMA_100DAYS(Messages.LabelChartDetailMovingAverage_100days), // SMA_200DAYS(Messages.LabelChartDetailMovingAverage_200days), // EMA_5DAYS(Messages.LabelChartDetailMovingAverage_5days), // EMA_20DAYS(Messages.LabelChartDetailMovingAverage_20days), // EMA_30DAYS(Messages.LabelChartDetailMovingAverage_30days), // EMA_38DAYS(Messages.LabelChartDetailMovingAverage_38days), // EMA_50DAYS(Messages.LabelChartDetailMovingAverage_50days), // EMA_90DAYS(Messages.LabelChartDetailMovingAverage_90days), // EMA_100DAYS(Messages.LabelChartDetailMovingAverage_100days), // EMA_200DAYS(Messages.LabelChartDetailMovingAverage_200days), // SHOW_MARKER_LINES(Messages.LabelChartDetailSettingsShowMarkerLines), // SHOW_DATA_LABELS(Messages.LabelChartDetailSettingsShowDataLabel), // SHOW_MISSING_TRADING_DAYS(Messages.LabelChartDetailSettingsShowMissingTradingDays), // SHOW_LIMITS(Messages.LabelChartDetailSettingsShowLimits); private final String label; private ChartDetails(String label) { this.label = label; } @Override public String toString() { return label; } } /* testing */ static class ChartRange { public final int start; public final int size; public final LocalDate startDate; public final LocalDate endDate; public ChartRange(int start, int end, LocalDate startDate, LocalDate endDate) { this.start = start; this.size = end - start; this.startDate = startDate; this.endDate = endDate; } /** * Maps the given {@link ChartInterval} to a range in the list of * security prices. Returns null if the interval does not intersect with * the list of prices. 
*/ public static ChartRange createFor(List<SecurityPrice> prices, ChartInterval chartInterval) { int start = Collections.binarySearch(prices, new SecurityPrice(chartInterval.getStart(), 0), new SecurityPrice.ByDate()); if (start < 0) start = -start - 1; if (start >= prices.size()) return null; int end = Collections.binarySearch(prices, new SecurityPrice(chartInterval.getEnd(), 0), new SecurityPrice.ByDate()); if (end < 0) end = -end - 1; else end += 1; // include the entry that has been found if (end <= start) return null; return new ChartRange(start, end, prices.get(start).getDate(), prices.get(Math.min(end, prices.size() - 1)).getDate()); } } private Color colorQuote; private static final Color colorEventPurchase = Colors.getColor(26, 173, 33); private static final Color colorEventSale = Colors.getColor(232, 51, 69); private static final Color colorEventDividend = Colors.getColor(128, 0, 128); private static final Color colorHigh = Colors.getColor(0, 102, 0); private static final Color colorLow = Colors.getColor(128, 0, 0); private static final Color colorFifoPurchasePrice = Colors.getColor(226, 122, 121); private static final Color colorMovingAveragePurchasePrice = Colors.getColor(150, 82, 81); private static final Color colorBollingerBands = Colors.getColor(201, 141, 68); private static final Color colorSMA1 = Colors.getColor(179, 107, 107); // #B36B6B private static final Color colorSMA2 = Colors.getColor(179, 167, 107); // #B3A76B private static final Color colorSMA3 = Colors.getColor(131, 179, 107); // #83B36B private static final Color colorSMA4 = Colors.getColor(107, 179, 143); // #6BB38F private static final Color colorSMA5 = Colors.getColor(107, 155, 179); // #6B9BB3 private static final Color colorSMA6 = Colors.getColor(119, 107, 179); // #776BB3 private static final Color colorSMA7 = Colors.getColor(179, 107, 179); // #B36BB3 private static final Color colorEMA1 = Colors.getColor(200, 107, 107); // #C86B6B private static final Color colorEMA2 = 
Colors.getColor(200, 167, 107); // #C8A76B private static final Color colorEMA3 = Colors.getColor(131, 200, 107); // #83C86B private static final Color colorEMA4 = Colors.getColor(107, 200, 143); // #6BC88F private static final Color colorEMA5 = Colors.getColor(107, 155, 200); // #6B9BC8 private static final Color colorEMA6 = Colors.getColor(119, 107, 200); // #776BC8 private static final Color colorEMA7 = Colors.getColor(200, 107, 200); // #C86BB3 private static final Color colorAreaPositive = Colors.getColor(90, 114, 226); private static final Color colorAreaNegative = Colors.getColor(226, 91, 90); private static final Color colorNonTradingDay = Colors.getColor(255, 137, 89); private static final String PREF_KEY = "security-chart-details"; //$NON-NLS-1$ private DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("d LLL"); //$NON-NLS-1$ private Composite container; private Client client; private CurrencyConverter converter; private Security security; private TimelineChart chart; /** * Calculates dynamically for each security the interval of security prices * to be shown. 
*/ private IntervalOption intervalOption = IntervalOption.Y2; private EnumSet<ChartDetails> chartConfig = EnumSet.of(ChartDetails.INVESTMENT, ChartDetails.EVENTS, ChartDetails.SCALING_LINEAR); private List<PaintListener> customPaintListeners = new ArrayList<>(); private List<PaintListener> customBehindPaintListener = new ArrayList<>(); private List<Transaction> customTooltipEvents = new ArrayList<>(); private int swtAntialias = SWT.ON; public SecuritiesChart(Composite parent, Client client, CurrencyConverter converter) { this.client = client; this.converter = converter; readChartConfig(client); container = new Composite(parent, SWT.NONE); container.setLayout(new FillLayout()); chart = new TimelineChart(container); chart.getTitle().setVisible(false); chart.getPlotArea().addPaintListener(event -> customPaintListeners.forEach(l -> l.paintControl(event))); chart.getPlotArea().addPaintListener(event -> customBehindPaintListener.forEach(l -> l.paintControl(event))); setupTooltip(); ILegend legend = chart.getLegend(); legend.setPosition(SWT.BOTTOM); legend.setVisible(true); } public IntervalOption getIntervalOption() { return intervalOption; } public void setIntervalOption(IntervalOption intervalOption) { this.intervalOption = intervalOption; } public void setQuoteColor(Color color) { this.colorQuote = color; } private void setupTooltip() { TimelineChartToolTip toolTip = chart.getToolTip(); toolTip.showToolTipOnlyForDatesInDataSeries(Messages.ColumnQuote); toolTip.setDefaultValueFormat(new DecimalFormat(Values.Quote.pattern())); toolTip.addSeriesExclude(Messages.LabelChartDetailChartDevelopment + "Positive"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailChartDevelopment + "Negative"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailChartDevelopment + "Zero"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.SecurityMenuBuy); toolTip.addSeriesExclude(Messages.SecurityMenuBuy + "1"); //$NON-NLS-1$ 
toolTip.addSeriesExclude(Messages.SecurityMenuBuy + "2"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.SecurityMenuSell); toolTip.addSeriesExclude(Messages.SecurityMenuSell + "1"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.SecurityMenuSell + "2"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailMarkerDividends); toolTip.addSeriesExclude(Messages.LabelChartDetailMarkerDividends + "1"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailMarkerDividends + "2"); //$NON-NLS-1$ toolTip.addSeriesExclude(Messages.LabelChartDetailIndicatorBollingerBands); int precision = FormatHelper.getCalculatedQuoteDisplayPrecision(); DecimalFormat calculatedFormat = new DecimalFormat(Values.CalculatedQuote.pattern()); calculatedFormat.setMinimumFractionDigits(precision); calculatedFormat.setMaximumFractionDigits(precision); for (String period : new String[] { Messages.LabelChartDetailMovingAverage_5days, Messages.LabelChartDetailMovingAverage_20days, Messages.LabelChartDetailMovingAverage_30days, Messages.LabelChartDetailMovingAverage_38days, Messages.LabelChartDetailMovingAverage_50days, Messages.LabelChartDetailMovingAverage_100days, Messages.LabelChartDetailMovingAverage_200days, }) { toolTip.overrideValueFormat(String.format("%s (%s)", Messages.LabelChartDetailMovingAverageEMA, period), //$NON-NLS-1$ calculatedFormat); toolTip.overrideValueFormat(String.format("%s (%s)", Messages.LabelChartDetailMovingAverageSMA, period), //$NON-NLS-1$ calculatedFormat); } toolTip.overrideValueFormat(Messages.LabelChartDetailIndicatorBollingerBandsLower, calculatedFormat); toolTip.overrideValueFormat(Messages.LabelChartDetailIndicatorBollingerBandsUpper, calculatedFormat); toolTip.overrideValueFormat(Messages.LabelChartDetailMarkerPurchaseFIFO, calculatedFormat); toolTip.overrideValueFormat(Messages.LabelChartDetailMarkerPurchaseMovingAverage, calculatedFormat); toolTip.addExtraInfo((composite, focus) -> { if (focus instanceof Date) { Instant instant = ((Date) 
focus).toInstant(); ZonedDateTime zdt = instant.atZone(ZoneId.systemDefault()); LocalDate date = zdt.toLocalDate(); Interval displayInterval = Interval.of(date.minusDays(5), date.plusDays(5)); customTooltipEvents.stream() // .filter(t -> displayInterval.contains(t.getDateTime())) // .forEach(t -> { if (t instanceof AccountTransaction) addDividendTooltip(composite, (AccountTransaction) t); else if (t instanceof PortfolioTransaction) addInvestmentTooltip(composite, (PortfolioTransaction) t); }); } }); } private void addInvestmentTooltip(Composite composite, PortfolioTransaction t) { Label label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipTransactionSummary, t.getType().toString(), dateTimeFormatter.format(t.getDateTime().toLocalDate()), t.getMonetaryAmount().toString())); label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipInvestmentDetails, Values.Share.format(t.getShares()), Values.Quote.format( t.getGrossPricePerShare(converter.with(t.getSecurity().getCurrencyCode()))))); } private void addDividendTooltip(Composite composite, AccountTransaction t) { Label label = new Label(composite, SWT.NONE); String amount = t.getMonetaryAmount().toString(); label.setText(MessageFormat.format(Messages.LabelToolTipTransactionSummary, t.getType().toString(), dateTimeFormatter.format(t.getDateTime().toLocalDate()), amount)); if (t.getShares() == 0L) { label = new Label(composite, SWT.NONE); label.setText("\u2211 " + t.getGrossValue().toString()); //$NON-NLS-1$ } else { Optional<Unit> grossValue = t.getUnit(Unit.Type.GROSS_VALUE); long gross = grossValue.isPresent() ? grossValue.get().getForex().getAmount() : t.getGrossValueAmount(); String currency = grossValue.isPresent() ? 
grossValue.get().getForex().getCurrencyCode() : t.getCurrencyCode(); // gross value in either forex currency or transaction currency String grossAmount = Money.of(currency, gross).toString(); // gross value in transaction currency String grossValueAmount = Money.of(t.getCurrencyCode(), t.getGrossValueAmount()).toString(); // display gross value in transaction currency, different gross // value in security currency exists if (!grossValueAmount.equals(grossAmount)) { label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipDividendDetailsGross, grossValueAmount)); } // display gross value, if different to net amount if (!grossAmount.equals(amount)) { label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipDividendDetailsGross, grossAmount)); } // display dividend per share in security currency label = new Label(composite, SWT.NONE); label.setText(MessageFormat.format(Messages.LabelToolTipDividendDetails, Values.Share.format(t.getShares()), currency, Values.Quote.format(Math.round(gross * Values.Share.divider() * Values.Quote.factorToMoney() / t.getShares())))); } } private void configureSeriesPainter(ILineSeries series, Date[] dates, double[] values, Color color, int lineWidth, LineStyle lineStyle, boolean enableArea, boolean visibleInLegend) { if (lineWidth != 0) series.setLineWidth(lineWidth); series.setLineStyle(lineStyle); series.setXDateSeries(dates); series.enableArea(enableArea); series.setYSeries(values); series.setAntialias(swtAntialias); if (color != null) series.setLineColor(color); series.setVisibleInLegend(visibleInLegend); } private final void readChartConfig(Client client) { String pref = ReadOnlyClient.unwrap(client).getProperty(PREF_KEY); if (pref == null) return; chartConfig.clear(); for (String key : pref.split(",")) //$NON-NLS-1$ { try { chartConfig.add(ChartDetails.valueOf(key)); } catch (IllegalArgumentException ignore) { // do not print exception to the log as it 
confuses users. The // old SMA200 label has been renamed, nothing we can change // anymore } } } public void addButtons(ToolBarManager toolBar) { List<Action> viewActions = new ArrayList<>(); for (IntervalOption option : IntervalOption.values()) { SimpleAction action = new SimpleAction(option.getLabel(), IAction.AS_CHECK_BOX, option.getTooltip(), a -> { this.intervalOption = option; updateChart(); for (Action viewAction : viewActions) viewAction.setChecked(a.equals(viewAction)); }); if (intervalOption == option) action.setChecked(true); viewActions.add(action); toolBar.add(action); } toolBar.add(new DropDown(Messages.MenuConfigureChart, Images.CONFIG, SWT.NONE, this::chartConfigAboutToShow)); } private void chartConfigAboutToShow(IMenuManager manager) { MenuManager subMenuChartScaling = new MenuManager(Messages.LabelChartDetailChartScaling, null); MenuManager subMenuChartDevelopment = new MenuManager(Messages.LabelChartDetailChartDevelopment, null); MenuManager subMenuChartMarker = new MenuManager(Messages.LabelChartDetailMarker, null); MenuManager subMenuChartIndicator = new MenuManager(Messages.LabelChartDetailIndicator, null); MenuManager subMenuChartMovingAverage = new MenuManager(Messages.LabelChartDetailMovingAverage, null); MenuManager subMenuChartMovingAverageSMA = new MenuManager(Messages.LabelChartDetailMovingAverageSMA, null); MenuManager subMenuChartMovingAverageEMA = new MenuManager(Messages.LabelChartDetailMovingAverageEMA, null); MenuManager subMenuChartSettings = new MenuManager(Messages.LabelChartDetailSettings, null); subMenuChartScaling.add(addMenuAction(ChartDetails.SCALING_LINEAR)); subMenuChartScaling.add(addMenuAction(ChartDetails.SCALING_LOG)); subMenuChartDevelopment.add(addMenuAction(ChartDetails.CLOSING)); subMenuChartDevelopment.add(addMenuAction(ChartDetails.PURCHASEPRICE)); subMenuChartMarker.add(addMenuAction(ChartDetails.INVESTMENT)); subMenuChartMarker.add(addMenuAction(ChartDetails.DIVIDENDS)); 
subMenuChartMarker.add(addMenuAction(ChartDetails.EVENTS)); subMenuChartMarker.add(addMenuAction(ChartDetails.EXTREMES)); subMenuChartMarker.add(addMenuAction(ChartDetails.FIFOPURCHASE)); subMenuChartMarker.add(addMenuAction(ChartDetails.FLOATINGAVGPURCHASE)); subMenuChartMarker.add(addMenuAction(ChartDetails.SHOW_LIMITS)); subMenuChartIndicator.add(addMenuAction(ChartDetails.BOLLINGERBANDS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_5DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_20DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_30DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_38DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_50DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_90DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_100DAYS)); subMenuChartMovingAverageSMA.add(addMenuAction(ChartDetails.SMA_200DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_5DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_20DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_30DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_38DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_50DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_90DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_100DAYS)); subMenuChartMovingAverageEMA.add(addMenuAction(ChartDetails.EMA_200DAYS)); subMenuChartSettings.add(addMenuAction(ChartDetails.SHOW_MARKER_LINES)); subMenuChartSettings.add(addMenuAction(ChartDetails.SHOW_DATA_LABELS)); subMenuChartSettings.add(addMenuAction(ChartDetails.SHOW_MISSING_TRADING_DAYS)); manager.add(subMenuChartScaling); manager.add(subMenuChartDevelopment); manager.add(subMenuChartMarker); manager.add(subMenuChartIndicator); manager.add(subMenuChartMovingAverage); 
subMenuChartMovingAverage.add(subMenuChartMovingAverageSMA); subMenuChartMovingAverage.add(subMenuChartMovingAverageEMA); manager.add(subMenuChartSettings); } private Action addMenuAction(ChartDetails detail) { Action action = new SimpleAction(detail.toString(), a -> { boolean isActive = chartConfig.contains(detail); if (isActive) chartConfig.remove(detail); else chartConfig.add(detail); if (!isActive) { switch (detail) { case SCALING_LINEAR: chartConfig.remove(ChartDetails.SCALING_LOG); break; case SCALING_LOG: chartConfig.remove(ChartDetails.SCALING_LINEAR); chartConfig.remove(ChartDetails.PURCHASEPRICE); chartConfig.remove(ChartDetails.CLOSING); break; case CLOSING: chartConfig.remove(ChartDetails.PURCHASEPRICE); chartConfig.remove(ChartDetails.SCALING_LOG); break; case PURCHASEPRICE: chartConfig.remove(ChartDetails.CLOSING); chartConfig.remove(ChartDetails.SCALING_LOG); break; default: break; } } if (!chartConfig.contains(ChartDetails.SCALING_LINEAR) && !chartConfig.contains(ChartDetails.SCALING_LOG)) chartConfig.add(ChartDetails.SCALING_LINEAR); ReadOnlyClient.unwrap(client).setProperty(PREF_KEY, String.join(",", //$NON-NLS-1$ chartConfig.stream().map(ChartDetails::name).collect(Collectors.toList()))); updateChart(); }); action.setChecked(chartConfig.contains(detail)); return action; } public void updateChart(Client client, Security security) { this.client = client; this.security = security; updateChart(); } public Control getControl() { return container; } private void updateChart() { chart.setRedraw(false); try { // delete all line series (quotes + possibly moving average) ISeries[] series = chart.getSeriesSet().getSeries(); for (ISeries s : series) chart.getSeriesSet().deleteSeries(s.getId()); chart.clearMarkerLines(); chart.clearNonTradingDayMarker(); customPaintListeners.clear(); customBehindPaintListener.clear(); customTooltipEvents.clear(); if (security == null || security.getPrices().isEmpty()) { chart.redraw(); return; } boolean 
showAreaRelativeToFirstQuote = chartConfig.contains(ChartDetails.CLOSING) || chartConfig.contains(ChartDetails.PURCHASEPRICE); // determine the interval to be shown in the chart ChartInterval chartInterval = intervalOption.getInverval(client, converter, security); // determine index range for given interval in prices list List<SecurityPrice> prices = security.getPricesIncludingLatest(); ChartRange range = ChartRange.createFor(prices, chartInterval); if (range == null) { chart.redraw(); return; } // prepare value arrays LocalDate[] dates = new LocalDate[range.size]; double[] values = new double[range.size]; double[] valuesRelative = new double[range.size]; double[] valuesRelativePositive = new double[range.size]; double[] valuesRelativeNegative = new double[range.size]; double[] valuesZeroLine = new double[range.size]; double firstQuote = 0; // Disable SWT antialias for more than 1000 records due to SWT // performance issue in Drawing swtAntialias = range.size > 1000 ? SWT.OFF : SWT.ON; if (!chartConfig.contains(ChartDetails.PURCHASEPRICE)) { SecurityPrice p2 = prices.get(range.start); firstQuote = (p2.getValue() / Values.Quote.divider()); } else { Optional<Double> purchasePrice = getLatestPurchasePrice(); if (purchasePrice.isPresent()) firstQuote = purchasePrice.get(); else showAreaRelativeToFirstQuote = false; } addChartMarkerBackground(chartInterval, range); for (int ii = 0; ii < range.size; ii++) { SecurityPrice p = prices.get(ii + range.start); dates[ii] = p.getDate(); values[ii] = p.getValue() / Values.Quote.divider(); if (showAreaRelativeToFirstQuote) { valuesRelative[ii] = (p.getValue() / Values.Quote.divider()) - firstQuote; valuesZeroLine[ii] = 0; if (valuesRelative[ii] >= 0) { valuesRelativePositive[ii] = valuesRelative[ii]; valuesRelativeNegative[ii] = 0; } else { valuesRelativePositive[ii] = 0; valuesRelativeNegative[ii] = valuesRelative[ii]; } } } Date[] javaDates = TimelineChart.toJavaUtilDate(dates); if (showAreaRelativeToFirstQuote) { ILineSeries 
lineSeries2ndNegative = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailChartDevelopmentClosing + "Negative"); //$NON-NLS-1$ lineSeries2ndNegative.setSymbolType(PlotSymbolType.NONE); lineSeries2ndNegative.setYAxisId(1); configureSeriesPainter(lineSeries2ndNegative, javaDates, valuesRelativeNegative, colorAreaNegative, 1, LineStyle.SOLID, true, false); ILineSeries lineSeries2ndPositive = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailChartDevelopmentClosing + "Positive"); //$NON-NLS-1$ lineSeries2ndPositive.setSymbolType(PlotSymbolType.NONE); lineSeries2ndPositive.setYAxisId(1); configureSeriesPainter(lineSeries2ndPositive, javaDates, valuesRelativePositive, colorAreaPositive, 1, LineStyle.SOLID, true, false); } ILineSeries lineSeries = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.ColumnQuote); lineSeries.setSymbolType(PlotSymbolType.NONE); configureSeriesPainter(lineSeries, javaDates, values, colorQuote, 2, LineStyle.SOLID, !showAreaRelativeToFirstQuote, false); chart.adjustRange(); addChartMarkerForeground(chartInterval); chart.adjustRange(); IAxis yAxis1st = chart.getAxisSet().getYAxis(0); IAxis yAxis2nd = chart.getAxisSet().getYAxis(1); yAxis2nd.setRange( new Range(yAxis1st.getRange().lower - firstQuote, yAxis1st.getRange().upper - firstQuote)); yAxis1st.enableLogScale(chartConfig.contains(ChartDetails.SCALING_LOG)); yAxis2nd.enableLogScale(chartConfig.contains(ChartDetails.SCALING_LOG)); yAxis1st.getTick().setVisible(true); if (chartConfig.contains(ChartDetails.SHOW_MISSING_TRADING_DAYS)) { TradeCalendar tradeCalendar = TradeCalendarManager.getInstance(security); List<LocalDate> calendarDates = new ArrayList<>(); for (LocalDate calendarDate = dates[0]; calendarDate .isBefore(dates[dates.length - 1]); calendarDate = calendarDate.plusDays(1)) calendarDates.add(calendarDate); for (LocalDate pricingDate : dates) calendarDates.remove(pricingDate); for 
(LocalDate targetDate : calendarDates) { if (!tradeCalendar.isHoliday(targetDate)) chart.addNonTradingDayMarker(targetDate, colorNonTradingDay); } } } finally { chart.setRedraw(true); chart.redraw(); } } private void addChartMarkerBackground(ChartInterval chartInterval, ChartRange range) { if (chartConfig.contains(ChartDetails.BOLLINGERBANDS)) addBollingerBandsMarkerLines(chartInterval, 20, 2); if (chartConfig.contains(ChartDetails.SMA_5DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_5days, 5, colorSMA1); if (chartConfig.contains(ChartDetails.SMA_20DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_20days, 20, colorSMA2); if (chartConfig.contains(ChartDetails.SMA_30DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_30days, 30, colorSMA3); if (chartConfig.contains(ChartDetails.SMA_38DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_38days, 38, colorSMA4); if (chartConfig.contains(ChartDetails.SMA_50DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_50days, 50, colorSMA4); if (chartConfig.contains(ChartDetails.SMA_90DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_90days, 90, colorSMA5); if (chartConfig.contains(ChartDetails.SMA_100DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_100days, 100, colorSMA6); if (chartConfig.contains(ChartDetails.SMA_200DAYS)) addSMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageSMA, Messages.LabelChartDetailMovingAverage_200days, 200, colorSMA7); if (chartConfig.contains(ChartDetails.EMA_5DAYS)) 
addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_5days, 5, colorEMA1); if (chartConfig.contains(ChartDetails.EMA_20DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_20days, 20, colorEMA2); if (chartConfig.contains(ChartDetails.EMA_30DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_30days, 30, colorEMA3); if (chartConfig.contains(ChartDetails.EMA_38DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_38days, 38, colorEMA4); if (chartConfig.contains(ChartDetails.EMA_50DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_50days, 50, colorEMA4); if (chartConfig.contains(ChartDetails.EMA_90DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_90days, 90, colorEMA5); if (chartConfig.contains(ChartDetails.EMA_100DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_100days, 100, colorEMA6); if (chartConfig.contains(ChartDetails.EMA_200DAYS)) addEMAMarkerLines(chartInterval, Messages.LabelChartDetailMovingAverageEMA, Messages.LabelChartDetailMovingAverage_200days, 200, colorEMA7); if (chartConfig.contains(ChartDetails.SHOW_LIMITS)) addLimitLines(chartInterval, range); } private void addChartMarkerForeground(ChartInterval chartInterval) { if (chartConfig.contains(ChartDetails.FIFOPURCHASE)) addFIFOPurchasePrice(chartInterval); if (chartConfig.contains(ChartDetails.FLOATINGAVGPURCHASE)) addMovingAveragePurchasePrice(chartInterval); if (chartConfig.contains(ChartDetails.INVESTMENT)) addInvestmentMarkerLines(chartInterval); if (chartConfig.contains(ChartDetails.DIVIDENDS)) 
addDividendMarkerLines(chartInterval); if (chartConfig.contains(ChartDetails.EVENTS)) addEventMarkerLines(chartInterval); if (chartConfig.contains(ChartDetails.EXTREMES)) addExtremesMarkerLines(chartInterval); } private void addLimitLines(ChartInterval chartInterval, ChartRange range) { this.security.getAttributes().getMap().forEach((key, val) -> { // null OR not Limit Price --> ignore if (val == null || val.getClass() != LimitPrice.class) return; LimitPrice limitAttribute = (LimitPrice) val; Optional<AttributeType> attributeName = client.getSettings().getAttributeTypes() .filter(attr -> attr.getId().equals(key)).findFirst(); // could not find name of limit attribute --> don't draw if (attributeName.isEmpty()) return; String lineID = attributeName.get().getName() + " (" + limitAttribute.toString() + ")"; //$NON-NLS-1$ //$NON-NLS-2$ // horizontal line: only two points required LocalDate[] dates = new LocalDate[2]; dates[0] = range.startDate; dates[1] = range.endDate; // both points with same y-value double[] values = new double[2]; values[0] = values[1] = limitAttribute.getValue() / Values.Quote.divider(); ILineSeries lineSeriesLimit = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, lineID); lineSeriesLimit.setXDateSeries(TimelineChart.toJavaUtilDate(dates)); lineSeriesLimit.setLineWidth(2); lineSeriesLimit.setLineStyle(LineStyle.DASH); lineSeriesLimit.enableArea(false); lineSeriesLimit.setSymbolType(PlotSymbolType.NONE); lineSeriesLimit.setYSeries(values); lineSeriesLimit.setAntialias(swtAntialias); lineSeriesLimit.setLineColor(Colors.ICON_ORANGE); lineSeriesLimit.setYAxisId(0); lineSeriesLimit.setVisibleInLegend(true); }); } private void addSMAMarkerLines(ChartInterval chartInterval, String smaSeries, String smaDaysWording, int smaDays, Color smaColor) { ChartLineSeriesAxes smaLines = new SimpleMovingAverage(smaDays, this.security, chartInterval).getSMA(); if (smaLines == null || smaLines.getValues() == null || smaLines.getDates() == null) return; 
@SuppressWarnings("nls") String lineID = smaSeries + " (" + smaDaysWording + ")"; ILineSeries lineSeriesSMA = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, lineID); lineSeriesSMA.setXDateSeries(smaLines.getDates()); lineSeriesSMA.setLineWidth(2); lineSeriesSMA.enableArea(false); lineSeriesSMA.setSymbolType(PlotSymbolType.NONE); lineSeriesSMA.setYSeries(smaLines.getValues()); lineSeriesSMA.setAntialias(swtAntialias); lineSeriesSMA.setLineColor(smaColor); lineSeriesSMA.setYAxisId(0); lineSeriesSMA.setVisibleInLegend(true); } private void addEMAMarkerLines(ChartInterval chartInterval, String emaSeries, String emaDaysWording, int emaDays, Color emaColor) { ChartLineSeriesAxes emaLines = new ExponentialMovingAverage(emaDays, this.security, chartInterval).getEMA(); if (emaLines == null || emaLines.getValues() == null || emaLines.getDates() == null) return; @SuppressWarnings("nls") String lineID = emaSeries + " (" + emaDaysWording + ")"; ILineSeries lineSeriesEMA = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, lineID); lineSeriesEMA.setXDateSeries(emaLines.getDates()); lineSeriesEMA.setLineWidth(2); lineSeriesEMA.enableArea(false); lineSeriesEMA.setSymbolType(PlotSymbolType.NONE); lineSeriesEMA.setYSeries(emaLines.getValues()); lineSeriesEMA.setAntialias(swtAntialias); lineSeriesEMA.setLineColor(emaColor); lineSeriesEMA.setYAxisId(0); lineSeriesEMA.setVisibleInLegend(true); } private void addInvestmentMarkerLines(ChartInterval chartInterval) { List<PortfolioTransaction> purchase = client.getPortfolios().stream().flatMap(p -> p.getTransactions().stream()) .filter(t -> t.getSecurity() == security) .filter(t -> t.getType() == PortfolioTransaction.Type.BUY || t.getType() == PortfolioTransaction.Type.DELIVERY_INBOUND) .filter(t -> chartInterval.contains(t.getDateTime())) // .sorted(new Transaction.ByDate()).collect(Collectors.toList()); addInvestmentMarkers(purchase, Messages.SecurityMenuBuy, colorEventPurchase); List<PortfolioTransaction> 
sales = client.getPortfolios().stream().flatMap(p -> p.getTransactions().stream()) .filter(t -> t.getSecurity() == security) .filter(t -> t.getType() == PortfolioTransaction.Type.SELL || t.getType() == PortfolioTransaction.Type.DELIVERY_OUTBOUND) .filter(t -> chartInterval.contains(t.getDateTime())) // .sorted(new Transaction.ByDate()).collect(Collectors.toList()); addInvestmentMarkers(sales, Messages.SecurityMenuSell, colorEventSale); } private void addInvestmentMarkers(List<PortfolioTransaction> transactions, String seriesLabel, Color color) { if (transactions.isEmpty()) return; customTooltipEvents.addAll(transactions); if (chartConfig.contains(ChartDetails.SHOW_MARKER_LINES)) { transactions.forEach(t -> { String label = Values.Share.format(t.getType().isPurchase() ? t.getShares() : -t.getShares()); double value = t.getGrossPricePerShare(converter.with(t.getSecurity().getCurrencyCode())).getAmount() / Values.Quote.divider(); chart.addMarkerLine(t.getDateTime().toLocalDate(), color, label, value); }); } else { Date[] dates = transactions.stream().map(PortfolioTransaction::getDateTime) .map(d -> Date.from(d.atZone(ZoneId.systemDefault()).toInstant())) .collect(Collectors.toList()).toArray(new Date[0]); double[] values = transactions.stream().mapToDouble( t -> t.getGrossPricePerShare(converter.with(t.getSecurity().getCurrencyCode())).getAmount() / Values.Quote.divider()) .toArray(); ILineSeries border = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, seriesLabel + "2"); //$NON-NLS-1$ border.setYAxisId(0); border.setSymbolColor(Display.getDefault().getSystemColor(SWT.COLOR_BLACK)); border.setSymbolType(PlotSymbolType.DIAMOND); border.setSymbolSize(7); configureSeriesPainter(border, dates, values, null, 0, LineStyle.NONE, false, false); ILineSeries background = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, seriesLabel + "1"); //$NON-NLS-1$ background.setYAxisId(0); background.setSymbolType(PlotSymbolType.DIAMOND); 
background.setSymbolSize(6); background.setSymbolColor(Display.getDefault().getSystemColor(SWT.COLOR_WHITE)); configureSeriesPainter(background, dates, values, null, 0, LineStyle.NONE, false, false); ILineSeries inner = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, seriesLabel); inner.setYAxisId(0); inner.setSymbolType(PlotSymbolType.DIAMOND); inner.setSymbolSize(4); inner.setSymbolColor(color); configureSeriesPainter(inner, dates, values, color, 0, LineStyle.NONE, false, true); if (chartConfig.contains(ChartDetails.SHOW_DATA_LABELS)) { customPaintListeners.add(event -> { IAxis xAxis = chart.getAxisSet().getXAxis(0); IAxis yAxis = chart.getAxisSet().getYAxis(0); for (int index = 0; index < dates.length; index++) { int x = xAxis.getPixelCoordinate(dates[index].getTime()); int y = yAxis.getPixelCoordinate(values[index]); PortfolioTransaction t = transactions.get(index); String label = Values.Share.format(t.getType().isPurchase() ? t.getShares() : -t.getShares()); Point textExtent = event.gc.textExtent(label); event.gc.setForeground(Colors.theme().defaultForeground()); event.gc.drawText(label, x - (textExtent.x / 2), y + border.getSymbolSize(), true); } }); } } } private void addDividendMarkerLines(ChartInterval chartInterval) { List<AccountTransaction> dividends = client.getAccounts().stream().flatMap(a -> a.getTransactions().stream()) .filter(t -> t.getSecurity() == security) .filter(t -> t.getType() == AccountTransaction.Type.DIVIDENDS) .filter(t -> chartInterval.contains(t.getDateTime())).sorted(new Transaction.ByDate()) .collect(Collectors.toList()); if (dividends.isEmpty()) return; customTooltipEvents.addAll(dividends); if (chartConfig.contains(ChartDetails.SHOW_MARKER_LINES)) { dividends.forEach(t -> chart.addMarkerLine(t.getDateTime().toLocalDate(), colorEventDividend, getDividendLabel(t))); } else { Date[] dates = dividends.stream().map(AccountTransaction::getDateTime) .map(d -> Date.from(d.atZone(ZoneId.systemDefault()).toInstant())) 
.collect(Collectors.toList()).toArray(new Date[0]); IAxis yAxis1st = chart.getAxisSet().getYAxis(0); double yAxis1stAxisPrice = yAxis1st.getRange().lower; double[] values = new double[dates.length]; Arrays.fill(values, yAxis1stAxisPrice); ILineSeries border = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailMarkerDividends + "2"); //$NON-NLS-1$ border.setYAxisId(0); border.setSymbolType(PlotSymbolType.SQUARE); border.setSymbolSize(6); border.setSymbolColor(Display.getDefault().getSystemColor(SWT.COLOR_BLACK)); configureSeriesPainter(border, dates, values, null, 0, LineStyle.NONE, false, false); ILineSeries background = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailMarkerDividends + "1"); //$NON-NLS-1$ background.setYAxisId(0); background.setSymbolType(PlotSymbolType.SQUARE); background.setSymbolSize(5); background.setSymbolColor(Display.getDefault().getSystemColor(SWT.COLOR_WHITE)); configureSeriesPainter(background, dates, values, null, 0, LineStyle.NONE, false, false); ILineSeries inner = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailMarkerDividends); inner.setYAxisId(0); inner.setSymbolType(PlotSymbolType.SQUARE); inner.setSymbolSize(3); inner.setSymbolColor(colorEventDividend); configureSeriesPainter(inner, dates, values, null, 0, LineStyle.NONE, false, true); if (chartConfig.contains(ChartDetails.SHOW_DATA_LABELS)) { customPaintListeners.add(event -> { IAxis xAxis = chart.getAxisSet().getXAxis(0); IAxis yAxis = chart.getAxisSet().getYAxis(0); int yPosLabel = 0; int lastWriteLabelLevel1 = 0; int lastWriteLabelLevel2 = 0; int lastWriteLabelLevel3 = 0; for (int index = 0; index < dates.length; index++) { boolean freeSpaceForLabelLevel1 = true; boolean freeSpaceForLabelLevel2 = true; boolean freeSpaceForLabelLevel3 = true; int x = xAxis.getPixelCoordinate(dates[index].getTime()); int y = yAxis.getPixelCoordinate(values[index]); String 
label = getDividendLabel(dividends.get(index)); Point textExtent = event.gc.textExtent(label); event.gc.setForeground(Colors.theme().defaultForeground()); if (((x - (textExtent.x / 2)) - lastWriteLabelLevel1) <= 0) freeSpaceForLabelLevel1 = false; if (((x - (textExtent.x / 2)) - lastWriteLabelLevel2) <= 0) freeSpaceForLabelLevel2 = false; if (((x - (textExtent.x / 2)) - lastWriteLabelLevel3) <= 0) freeSpaceForLabelLevel3 = false; if (freeSpaceForLabelLevel1 || freeSpaceForLabelLevel2 || freeSpaceForLabelLevel3) { if (freeSpaceForLabelLevel1) { yPosLabel = y - textExtent.y - border.getSymbolSize(); lastWriteLabelLevel1 = (x + (textExtent.x / 2)); } if (freeSpaceForLabelLevel2 && !freeSpaceForLabelLevel1) { yPosLabel = yPosLabel - textExtent.y; lastWriteLabelLevel2 = (x + (textExtent.x / 2)); } if (freeSpaceForLabelLevel3 && !freeSpaceForLabelLevel2 && !freeSpaceForLabelLevel1) { yPosLabel = yPosLabel - textExtent.y; lastWriteLabelLevel3 = (x + (textExtent.x / 2)); } event.gc.drawText(label, x - (textExtent.x / 2), yPosLabel, true); } } }); } } } private String getDividendLabel(AccountTransaction t) { if (t.getShares() == 0L) { return "\u2211 " + t.getGrossValue().toString(); //$NON-NLS-1$ } else { Optional<Unit> grossValue = t.getUnit(Unit.Type.GROSS_VALUE); long gross = grossValue.isPresent() ? 
grossValue.get().getForex().getAmount() : t.getGrossValueAmount(); long perShare = Math.round(gross * Values.Share.divider() * Values.Quote.factorToMoney() / t.getShares()); return Values.Quote.format(perShare); } } private void addEventMarkerLines(ChartInterval chartInterval) { security.getEvents().stream() // .filter(e -> chartInterval.contains(e.getDate())) // .filter(e -> e.getType() != SecurityEvent.Type.DIVIDEND_PAYMENT) // .forEach(e -> chart.addMarkerLine(e.getDate(), Display.getDefault().getSystemColor(SWT.COLOR_DARK_GRAY), e.getDetails())); } private void addExtremesMarkerLines(ChartInterval chartInterval) { Optional<SecurityPrice> max = security.getPricesIncludingLatest().stream() // .filter(p -> chartInterval.contains(p.getDate())) // .max(Comparator.comparing(SecurityPrice::getValue)); Optional<SecurityPrice> min = security.getPricesIncludingLatest().stream() // .filter(p -> chartInterval.contains(p.getDate())) // .min(Comparator.comparing(SecurityPrice::getValue)); max.ifPresent(high -> addExtremeMarker(high, PlotSymbolType.TRIANGLE, // Messages.LabelChartDetailMarkerHigh, colorHigh)); min.ifPresent(low -> addExtremeMarker(low, PlotSymbolType.INVERTED_TRIANGLE, // Messages.LabelChartDetailMarkerLow, colorLow)); } private void addExtremeMarker(SecurityPrice price, PlotSymbolType plotSymbolType, String seriesLabel, Color color) { LocalDate eventDate = price.getDate(); String valueFormat = Values.Quote.format(price.getValue()); double value = price.getValue() / Values.Quote.divider(); if (chartConfig.contains(ChartDetails.SHOW_MARKER_LINES)) { chart.addMarkerLine(eventDate, color, valueFormat); } else { Date zonedDate = Date.from(eventDate.atStartOfDay(ZoneId.systemDefault()).toInstant()); ILineSeries inner = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, seriesLabel); inner.setYAxisId(0); inner.setSymbolType(plotSymbolType); inner.setSymbolSize(6); inner.setSymbolColor(color); configureSeriesPainter(inner, new Date[] { zonedDate }, new 
double[] { value }, color, 0, LineStyle.NONE, false, true); if (chartConfig.contains(ChartDetails.SHOW_DATA_LABELS)) { customPaintListeners.add(event -> { IAxis xAxis = chart.getAxisSet().getXAxis(0); IAxis yAxis = chart.getAxisSet().getYAxis(0); int x = xAxis.getPixelCoordinate(zonedDate.getTime()); int y = yAxis.getPixelCoordinate(value); Point textExtent = event.gc.textExtent(valueFormat); event.gc.setForeground(Colors.theme().defaultForeground()); if (inner.getSymbolColor() == colorHigh) y = y - textExtent.y - inner.getSymbolSize(); else y = y + inner.getSymbolSize(); event.gc.drawText(valueFormat, x - (textExtent.x / 2), y, true); }); } } } private void addBollingerBandsMarkerLines(ChartInterval chartInterval, int bollingerBandsDays, double bollingerBandsFactor) { BollingerBands bands = new BollingerBands(bollingerBandsDays, bollingerBandsFactor, this.security, chartInterval); ChartLineSeriesAxes lowerBand = bands.getLowerBand(); if (lowerBand == null || lowerBand.getValues() == null || lowerBand.getDates() == null) return; ILineSeries lineSeriesBollingerBandsLowerBand = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailIndicatorBollingerBandsLower); lineSeriesBollingerBandsLowerBand.setXDateSeries(lowerBand.getDates()); lineSeriesBollingerBandsLowerBand.setLineStyle(LineStyle.SOLID); lineSeriesBollingerBandsLowerBand.setLineWidth(2); lineSeriesBollingerBandsLowerBand.setSymbolType(PlotSymbolType.NONE); lineSeriesBollingerBandsLowerBand.setYSeries(lowerBand.getValues()); lineSeriesBollingerBandsLowerBand.setAntialias(swtAntialias); lineSeriesBollingerBandsLowerBand.setLineColor(colorBollingerBands); lineSeriesBollingerBandsLowerBand.setYAxisId(0); lineSeriesBollingerBandsLowerBand.setVisibleInLegend(false); ChartLineSeriesAxes middleBand = bands.getMiddleBand(); ILineSeries lineSeriesBollingerBandsMiddleBand = (ILineSeries) chart.getSeriesSet() .createSeries(SeriesType.LINE, 
Messages.LabelChartDetailIndicatorBollingerBands); lineSeriesBollingerBandsMiddleBand.setXDateSeries(middleBand.getDates()); lineSeriesBollingerBandsMiddleBand.setLineWidth(2); lineSeriesBollingerBandsMiddleBand.setLineStyle(LineStyle.DOT); lineSeriesBollingerBandsMiddleBand.setSymbolType(PlotSymbolType.NONE); lineSeriesBollingerBandsMiddleBand.setYSeries(middleBand.getValues()); lineSeriesBollingerBandsMiddleBand.setAntialias(swtAntialias); lineSeriesBollingerBandsMiddleBand.setLineColor(colorBollingerBands); lineSeriesBollingerBandsMiddleBand.setYAxisId(0); lineSeriesBollingerBandsMiddleBand.setVisibleInLegend(true); ChartLineSeriesAxes upperBand = bands.getUpperBand(); ILineSeries lineSeriesBollingerBandsUpperBand = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, Messages.LabelChartDetailIndicatorBollingerBandsUpper); lineSeriesBollingerBandsUpperBand.setXDateSeries(upperBand.getDates()); lineSeriesBollingerBandsUpperBand.setLineWidth(2); lineSeriesBollingerBandsUpperBand.setLineStyle(LineStyle.SOLID); lineSeriesBollingerBandsUpperBand.setSymbolType(PlotSymbolType.NONE); lineSeriesBollingerBandsUpperBand.setYSeries(upperBand.getValues()); lineSeriesBollingerBandsUpperBand.setAntialias(swtAntialias); lineSeriesBollingerBandsUpperBand.setLineColor(colorBollingerBands); lineSeriesBollingerBandsUpperBand.setYAxisId(0); lineSeriesBollingerBandsUpperBand.setVisibleInLegend(false); } private void addFIFOPurchasePrice(ChartInterval chartInterval) { // securities w/o currency (e.g. index) cannot be bought and hence have // no purchase price if (security.getCurrencyCode() == null) return; // create a list of dates that are relevant for FIFO purchase price // changes (i.e. 
all purchase and sell events) Client filteredClient = new ClientSecurityFilter(security).filter(client); CurrencyConverter securityCurrency = converter.with(security.getCurrencyCode()); List<LocalDate> candidates = client.getPortfolios().stream() // .flatMap(p -> p.getTransactions().stream()) // .filter(t -> t.getSecurity().equals(security)) .filter(t -> !(t.getType() == PortfolioTransaction.Type.TRANSFER_IN || t.getType() == PortfolioTransaction.Type.TRANSFER_OUT)) .filter(t -> !t.getDateTime().toLocalDate().isAfter(chartInterval.getEnd())) .map(t -> chartInterval.contains(t.getDateTime()) ? t.getDateTime().toLocalDate() : chartInterval.getStart()) .distinct() // .sorted() // .collect(Collectors.toList()); // calculate FIFO purchase price for each event - separate lineSeries // per holding period List<Double> values = new ArrayList<>(); List<LocalDate> dates = new ArrayList<>(); int seriesCounter = 0; for (LocalDate eventDate : candidates) { Optional<Double> purchasePrice = getPurchasePrice(filteredClient, securityCurrency, eventDate); if (purchasePrice.isPresent()) { dates.add(eventDate); values.add(purchasePrice.get()); } else { if (!dates.isEmpty()) { // add previous value if the data series ends here (no more // future events) dates.add(eventDate); values.add(values.get(values.size() - 1)); createFIFOPurchaseLineSeries(values, dates, seriesCounter++); values.clear(); dates.clear(); } else if (dates.isEmpty()) { // if no holding period exists, then do not add the event at // all } } } // add today if needed getPurchasePrice(filteredClient, securityCurrency, chartInterval.getEnd()).ifPresent(price -> { dates.add(chartInterval.getEnd()); values.add(price); }); if (!dates.isEmpty()) createFIFOPurchaseLineSeries(values, dates, seriesCounter); } private void createFIFOPurchaseLineSeries(List<Double> values, List<LocalDate> dates, int seriesCounter) { String label = seriesCounter == 0 ? 
Messages.LabelChartDetailMarkerPurchaseFIFO : MessageFormat.format(Messages.LabelChartDetailMarkerPurchaseFIFOHoldingPeriod, seriesCounter + 1); ILineSeries series = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, label); series.setSymbolType(PlotSymbolType.NONE); series.setYAxisId(0); series.enableStep(true); configureSeriesPainter(series, TimelineChart.toJavaUtilDate(dates.toArray(new LocalDate[0])), Doubles.toArray(values), colorFifoPurchasePrice, 2, LineStyle.SOLID, false, seriesCounter == 0); } private void addMovingAveragePurchasePrice(ChartInterval chartInterval) { // securities w/o currency (e.g. index) cannot be bought and hence have // no purchase price if (security.getCurrencyCode() == null) return; // create a list of dates that are relevant for floating avg purchase // price // changes (i.e. all purchase and sell events) Client filteredClient = new ClientSecurityFilter(security).filter(client); CurrencyConverter securityCurrency = converter.with(security.getCurrencyCode()); List<LocalDate> candidates = client.getPortfolios().stream() // .flatMap(p -> p.getTransactions().stream()) // .filter(t -> t.getSecurity().equals(security)) .filter(t -> !(t.getType() == PortfolioTransaction.Type.TRANSFER_IN || t.getType() == PortfolioTransaction.Type.TRANSFER_OUT)) .filter(t -> !t.getDateTime().toLocalDate().isAfter(chartInterval.getEnd())) .map(t -> chartInterval.contains(t.getDateTime()) ? 
t.getDateTime().toLocalDate() : chartInterval.getStart()) .distinct() // .sorted() // .collect(Collectors.toList()); // calculate floating avg purchase price for each event - separate // lineSeries // per holding period List<Double> values = new ArrayList<>(); List<LocalDate> dates = new ArrayList<>(); int seriesCounter = 0; for (LocalDate eventDate : candidates) { Optional<Double> purchasePrice = getMovingAveragePurchasePrice(filteredClient, securityCurrency, eventDate); if (purchasePrice.isPresent()) { dates.add(eventDate); values.add(purchasePrice.get()); } else { if (!dates.isEmpty()) { // add previous value if the data series ends here (no more // future events) dates.add(eventDate); values.add(values.get(values.size() - 1)); createMovingAveragePurchaseLineSeries(values, dates, seriesCounter++); values.clear(); dates.clear(); } else if (dates.isEmpty()) { // if no holding period exists, then do not add the event at // all } } } // add today if needed getMovingAveragePurchasePrice(filteredClient, securityCurrency, chartInterval.getEnd()).ifPresent(price -> { dates.add(chartInterval.getEnd()); values.add(price); }); if (!dates.isEmpty()) createMovingAveragePurchaseLineSeries(values, dates, seriesCounter); } private void createMovingAveragePurchaseLineSeries(List<Double> values, List<LocalDate> dates, int seriesCounter) { String label = seriesCounter == 0 ? 
Messages.LabelChartDetailMarkerPurchaseMovingAverage : MessageFormat.format(Messages.LabelChartDetailMarkerPurchaseMovingAverageHoldingPeriod, seriesCounter + 1); ILineSeries series = (ILineSeries) chart.getSeriesSet().createSeries(SeriesType.LINE, label); series.setSymbolType(PlotSymbolType.NONE); series.setYAxisId(0); series.enableStep(true); configureSeriesPainter(series, TimelineChart.toJavaUtilDate(dates.toArray(new LocalDate[0])), Doubles.toArray(values), colorMovingAveragePurchasePrice, 2, LineStyle.SOLID, false, seriesCounter == 0); } private Optional<Double> getLatestPurchasePrice() { // securities w/o currency (e.g. index) cannot be bought and hence have // no purchase price if (security.getCurrencyCode() == null) return Optional.empty(); return getPurchasePrice(new ClientSecurityFilter(security).filter(client), converter.with(security.getCurrencyCode()), LocalDate.now()); } private Optional<Double> getPurchasePrice(Client filteredClient, CurrencyConverter currencyConverter, LocalDate date) { return SecurityPerformanceSnapshot .create(filteredClient, currencyConverter, Interval.of(LocalDate.MIN, date), SecurityPerformanceIndicator.Costs.class) .getRecord(security) // .filter(r -> !r.getFifoCostPerSharesHeld().isZero()) // .map(r -> r.getFifoCostPerSharesHeld().getAmount() / Values.Quote.divider()); } private Optional<Double> getMovingAveragePurchasePrice(Client filteredClient, CurrencyConverter currencyConverter, LocalDate date) { return SecurityPerformanceSnapshot .create(filteredClient, currencyConverter, Interval.of(LocalDate.MIN, date), SecurityPerformanceIndicator.Costs.class) .getRecord(security) // .filter(r -> !r.getFifoCostPerSharesHeld().isZero()) // .map(r -> r.getMovingAverageCostPerSharesHeld().getAmount() / Values.Quote.divider()); } }
Fixed limit prices not shown in chart if data is filtered Issue: #2455 #2456
name.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/views/SecuritiesChart.java
Fixed limit prices not shown in chart if data is filtered
<ide><path>ame.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/views/SecuritiesChart.java <ide> <ide> LimitPrice limitAttribute = (LimitPrice) val; <ide> <del> Optional<AttributeType> attributeName = client.getSettings().getAttributeTypes() <add> Optional<AttributeType> attributeName = ReadOnlyClient.unwrap(client) // unwrap because ReadOnlyClient only contains/provides default attributes <add> .getSettings().getAttributeTypes() <ide> .filter(attr -> attr.getId().equals(key)).findFirst(); <ide> // could not find name of limit attribute --> don't draw <ide> if (attributeName.isEmpty())
Java
mit
b729ffa333d407523f36446d865c1529290358cf
0
devsunny/app-galleries,devsunny/app-galleries,devsunny/app-galleries
package com.asksunny.tls; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintWriter; import java.security.cert.Certificate; import java.security.cert.X509Certificate; import java.util.concurrent.atomic.AtomicBoolean; import javax.net.ssl.SSLPeerUnverifiedException; import javax.net.ssl.SSLServerSocket; import javax.net.ssl.SSLServerSocketFactory; import javax.net.ssl.SSLSocket; /** * * <pre> * keytool -genkey -keypass "changeit" -dname "CN=Sample Cert, OU=R&D, O=Company Ltd., L=New York City, S=NY, C=US" * -keyalg RSA -alias tomcat -keystore selfsigned.jks -validity 3650 -keysize 2048 -storepass "changeit" * </pre> * * @author SunnyLiu * */ public class TLSSocketServer { public static final String KEYSTORE_PASS = "changeit"; public static final String TRUSTSTORE_PASS = "changeit"; public static final String DEFAULT_SSL_PROTOCOL = "TLSv1.2"; private int port; private AtomicBoolean serving = new AtomicBoolean(true); public TLSSocketServer(int port) { this.port = port; } public void serve() throws Exception { SSLServerSocketFactory sslServerSocketFactory = AliasSpecificSSLSocketFactory.getSSLServerSocketFactory( TLSSocketServer.class.getResourceAsStream("/server_keystore.jks"), KEYSTORE_PASS, null, TLSSocketServer.class.getResourceAsStream("/client_keystore.jks"), TRUSTSTORE_PASS); SSLServerSocket serverSock = (SSLServerSocket) sslServerSocketFactory.createServerSocket(port); serverSock.setNeedClientAuth(true); while (serving.get()) { SSLSocket socket = (SSLSocket) serverSock.accept(); socket.setEnableSessionCreation(true); handleConnection(socket); } } protected void handleConnection(SSLSocket socket) { try { String principal = getPrincipal(socket); System.out.println(principal); InputStream in = socket.getInputStream(); OutputStream out = socket.getOutputStream(); BufferedReader br = new BufferedReader(new InputStreamReader(in)); PrintWriter writer 
= new PrintWriter(out); String line = null; while ((line = br.readLine()) != null) { if (line.trim().equals("bye")) { break; } else if (line.trim().equals("shutdown")) { this.serving.set(false); break; } else { writer.println(line); writer.flush(); } } } catch (Exception ex) { ex.printStackTrace(); } finally { try { socket.close(); } catch (IOException e) { e.printStackTrace(); } } } protected String getPrincipal(SSLSocket socket) throws SSLPeerUnverifiedException { String principal = null; Certificate[] certs = socket.getSession().getPeerCertificates(); for (int i = 0; i < certs.length; i++) { if(certs[i] instanceof X509Certificate){ principal = ((X509Certificate)certs[i]).getIssuerDN().getName(); break; } } return principal; } public static void main(String[] args) throws Exception { TLSSocketServer server = new TLSSocketServer(8889); server.serve(); } }
tls-authentication/src/main/java/com/asksunny/tls/TLSSocketServer.java
package com.asksunny.tls; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintWriter; import java.security.cert.Certificate; import java.security.cert.X509Certificate; import java.util.concurrent.atomic.AtomicBoolean; import javax.net.ssl.SSLPeerUnverifiedException; import javax.net.ssl.SSLServerSocket; import javax.net.ssl.SSLServerSocketFactory; import javax.net.ssl.SSLSocket; public class TLSSocketServer { public static final String KEYSTORE_PASS = "changeit"; public static final String TRUSTSTORE_PASS = "changeit"; public static final String DEFAULT_SSL_PROTOCOL = "TLSv1.2"; private int port; private AtomicBoolean serving = new AtomicBoolean(true); public TLSSocketServer(int port) { this.port = port; } public void serve() throws Exception { SSLServerSocketFactory sslServerSocketFactory = AliasSpecificSSLSocketFactory.getSSLServerSocketFactory( TLSSocketServer.class.getResourceAsStream("/server_keystore.jks"), KEYSTORE_PASS, null, TLSSocketServer.class.getResourceAsStream("/client_keystore.jks"), TRUSTSTORE_PASS); SSLServerSocket serverSock = (SSLServerSocket) sslServerSocketFactory.createServerSocket(port); serverSock.setNeedClientAuth(true); while (serving.get()) { SSLSocket socket = (SSLSocket) serverSock.accept(); socket.setEnableSessionCreation(true); handleConnection(socket); } } protected void handleConnection(SSLSocket socket) { try { String principal = getPrincipal(socket); System.out.println(principal); InputStream in = socket.getInputStream(); OutputStream out = socket.getOutputStream(); BufferedReader br = new BufferedReader(new InputStreamReader(in)); PrintWriter writer = new PrintWriter(out); String line = null; while ((line = br.readLine()) != null) { if (line.trim().equals("bye")) { break; } else if (line.trim().equals("shutdown")) { this.serving.set(false); break; } else { writer.println(line); writer.flush(); } } } catch 
(Exception ex) { ex.printStackTrace(); } finally { try { socket.close(); } catch (IOException e) { e.printStackTrace(); } } } protected String getPrincipal(SSLSocket socket) throws SSLPeerUnverifiedException { String principal = null; Certificate[] certs = socket.getSession().getPeerCertificates(); for (int i = 0; i < certs.length; i++) { if(certs[i] instanceof X509Certificate){ principal = ((X509Certificate)certs[i]).getIssuerDN().getName(); break; } } return principal; } public static void main(String[] args) throws Exception { TLSSocketServer server = new TLSSocketServer(8889); server.serve(); } }
Add keygen command
tls-authentication/src/main/java/com/asksunny/tls/TLSSocketServer.java
Add keygen command
<ide><path>ls-authentication/src/main/java/com/asksunny/tls/TLSSocketServer.java <ide> import javax.net.ssl.SSLServerSocket; <ide> import javax.net.ssl.SSLServerSocketFactory; <ide> import javax.net.ssl.SSLSocket; <del> <add>/** <add> * <add> * <pre> <add> * keytool -genkey -keypass "changeit" -dname "CN=Sample Cert, OU=R&D, O=Company Ltd., L=New York City, S=NY, C=US" <add> * -keyalg RSA -alias tomcat -keystore selfsigned.jks -validity 3650 -keysize 2048 -storepass "changeit" <add> * </pre> <add> * <add> * @author SunnyLiu <add> * <add> */ <ide> public class TLSSocketServer { <ide> <ide> public static final String KEYSTORE_PASS = "changeit";
Java
mit
2ba12b35bb1946c7bc9bec54673afd7aef896c82
0
funIntentions/flow,funIntentions/project-smart-grid
package com.projects.view; import com.projects.helper.Constants; import com.projects.model.Building; import com.projects.model.UsageTimeSpan; import javafx.collections.ListChangeListener; import javafx.fxml.FXML; import javafx.scene.chart.LineChart; import javafx.scene.chart.XYChart; import javafx.scene.control.TabPane; import javafx.scene.control.TableColumn; import javafx.scene.control.TableView; import javafx.scene.control.cell.CheckBoxTableCell; import javafx.scene.control.cell.TextFieldTableCell; import javafx.stage.Stage; import javafx.util.converter.DoubleStringConverter; import javafx.util.converter.LocalTimeStringConverter; import javax.swing.text.NumberFormatter; import java.awt.image.BufferedImage; import java.beans.EventHandler; import java.time.LocalTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.List; /** * Created by Dan on 8/21/2015. */ public class LoadProfileEditDialogController { @FXML private LineChart<String, Float> loadProfileChart; @FXML private TabPane daysOfTheWeekTabPane; @FXML private TableView<UsageTimeSpan> usageTable; @FXML private TableColumn<UsageTimeSpan, Boolean> mondayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> tuesdayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> wednesdayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> thursdayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> fridayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> saturdayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> sundayColumn; @FXML private TableColumn<UsageTimeSpan, LocalTime> usageFromColumn; @FXML private TableColumn<UsageTimeSpan, LocalTime> usageToColumn; @FXML private TableColumn<UsageTimeSpan, Double> usageColumn; private XYChart.Series<String, Float> series = new XYChart.Series<>(); private Building building = null; private Stage dialogStage = null; @FXML private void initialize() { usageTable.setEditable(true); 
loadProfileChart.getData().add(series); usageColumn.setCellValueFactory(cellData -> cellData.getValue().usageProperty().asObject()); usageFromColumn.setCellValueFactory(cellData -> cellData.getValue().fromProperty()); usageToColumn.setCellValueFactory(cellData -> cellData.getValue().toProperty()); mondayColumn.setCellValueFactory(cellData -> cellData.getValue().mondayProperty()); tuesdayColumn.setCellValueFactory(cellData -> cellData.getValue().tuesdayProperty()); wednesdayColumn.setCellValueFactory(cellData -> cellData.getValue().wednesdayProperty()); thursdayColumn.setCellValueFactory(cellData -> cellData.getValue().thursdayProperty()); fridayColumn.setCellValueFactory(cellData -> cellData.getValue().fridayProperty()); saturdayColumn.setCellValueFactory(cellData -> cellData.getValue().saturdayProperty()); sundayColumn.setCellValueFactory(cellData -> cellData.getValue().sundayProperty()); DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(Constants.HOURS_AND_MINUTES_FORMAT); LocalTimeStringConverter localTimeStringConverter = new LocalTimeStringConverter(dateTimeFormatter, dateTimeFormatter); DoubleStringConverter doubleStringConverter = new DoubleStringConverter(); usageColumn.setCellFactory(TextFieldTableCell.<UsageTimeSpan, Double>forTableColumn(doubleStringConverter)); usageColumn.setOnEditCommit((TableColumn.CellEditEvent<UsageTimeSpan, Double> t) -> { (t.getTableView().getItems().get(t.getTablePosition().getRow())).setUsage(t.getNewValue()); }); usageFromColumn.setCellFactory(TextFieldTableCell.<UsageTimeSpan, LocalTime>forTableColumn(localTimeStringConverter)); usageFromColumn.setOnEditCommit((TableColumn.CellEditEvent<UsageTimeSpan, LocalTime> t) -> { (t.getTableView().getItems().get(t.getTablePosition().getRow())).setFrom(t.getNewValue()); }); usageToColumn.setCellFactory(TextFieldTableCell.<UsageTimeSpan,LocalTime>forTableColumn(localTimeStringConverter)); usageToColumn.setOnEditCommit((TableColumn.CellEditEvent<UsageTimeSpan, LocalTime> 
t) -> { (t.getTableView().getItems().get(t.getTablePosition().getRow())).setTo(t.getNewValue()); }); mondayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(mondayColumn)); tuesdayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(tuesdayColumn)); wednesdayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(wednesdayColumn)); thursdayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(thursdayColumn)); fridayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(fridayColumn)); saturdayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(saturdayColumn)); sundayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(sundayColumn)); daysOfTheWeekTabPane.getSelectionModel().selectedItemProperty().addListener( (observable, oldValue, newValue) -> updateChartData()); } public void setBuilding(Building building) { this.building = building; usageTable.setItems(building.getManualLoadProfileData()); initChartData(); building.getManualLoadProfileData().addListener(new ListChangeListener<UsageTimeSpan>() { @Override public void onChanged(Change<? extends UsageTimeSpan> c) { updateChartData(); } }); } public void setDialogStage(Stage dialogStage) { this.dialogStage = dialogStage; } private void initChartData() { building.calculateLoadProfile(); int day = daysOfTheWeekTabPane.getSelectionModel().getSelectedIndex(); List<Float> loadProfile = building.getLoadProfilesForWeek() != null && building.getLoadProfilesForWeek().size() > 0 ? 
building.getLoadProfilesForWeek().get(day): new ArrayList<>(); loadProfileChart.setAnimated(false); series.getData().clear(); loadProfileChart.setAnimated(true); for (int i = 0; i < Constants.MINUTES_IN_DAY; i+=30) { series.getData().add(new XYChart.Data<>(String.valueOf(i/30), loadProfile.get(i))); } } private void updateChartData() { building.calculateLoadProfile(); int day = daysOfTheWeekTabPane.getSelectionModel().getSelectedIndex(); List<Float> loadProfile = building.getLoadProfilesForWeek() != null && building.getLoadProfilesForWeek().size() > 0 ? building.getLoadProfilesForWeek().get(day): new ArrayList<>(); for (int i = 0; i < series.getData().size(); ++i) { XYChart.Data<String, Float> data = series.getData().get(i); if (data.getYValue().floatValue() != loadProfile.get(i * 30).floatValue()) series.getData().set(i, new XYChart.Data<>(String.valueOf(i), loadProfile.get(i * 30))); } } @FXML private void handleCreateTimeSpan() { UsageTimeSpan timeSpan = new UsageTimeSpan(0, LocalTime.ofSecondOfDay(0), LocalTime.ofSecondOfDay(0)); timeSpan.usageProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.fromProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.toProperty().addListener((observable, oldValue, newValue) -> { updateChartData(); }); timeSpan.mondayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.tuesdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.wednesdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.thursdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.fridayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.saturdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.sundayProperty().addListener((observable, oldValue, newValue) -> 
{ updateChartData(); }); usageTable.getItems().add(timeSpan); building.calculateLoadProfile(); } @FXML private void handleRemoveTimeSpan() { int index = usageTable.getSelectionModel().getSelectedIndex(); if (index >= 0) { usageTable.getItems().remove(index); building.calculateLoadProfile(); } } @FXML private void handleClose() { dialogStage.close(); } }
src/com/projects/view/LoadProfileEditDialogController.java
package com.projects.view; import com.projects.helper.Constants; import com.projects.model.Building; import com.projects.model.UsageTimeSpan; import javafx.collections.ListChangeListener; import javafx.fxml.FXML; import javafx.scene.chart.LineChart; import javafx.scene.chart.XYChart; import javafx.scene.control.TabPane; import javafx.scene.control.TableColumn; import javafx.scene.control.TableView; import javafx.scene.control.cell.CheckBoxTableCell; import javafx.scene.control.cell.TextFieldTableCell; import javafx.stage.Stage; import javafx.util.converter.DoubleStringConverter; import javafx.util.converter.LocalTimeStringConverter; import javax.swing.text.NumberFormatter; import java.awt.image.BufferedImage; import java.beans.EventHandler; import java.time.LocalTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.List; /** * Created by Dan on 8/21/2015. */ public class LoadProfileEditDialogController { @FXML private LineChart<String, Float> loadProfileChart; @FXML private TabPane daysOfTheWeekTabPane; @FXML private TableView<UsageTimeSpan> usageTable; @FXML private TableColumn<UsageTimeSpan, Boolean> mondayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> tuesdayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> wednesdayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> thursdayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> fridayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> saturdayColumn; @FXML private TableColumn<UsageTimeSpan, Boolean> sundayColumn; @FXML private TableColumn<UsageTimeSpan, LocalTime> usageFromColumn; @FXML private TableColumn<UsageTimeSpan, LocalTime> usageToColumn; @FXML private TableColumn<UsageTimeSpan, Double> usageColumn; private XYChart.Series<String, Float> series = new XYChart.Series<>(); private Building building = null; private Stage dialogStage = null; @FXML private void initialize() { usageTable.setEditable(true); 
loadProfileChart.getData().add(series); usageColumn.setCellValueFactory(cellData -> cellData.getValue().usageProperty().asObject()); usageFromColumn.setCellValueFactory(cellData -> cellData.getValue().fromProperty()); usageToColumn.setCellValueFactory(cellData -> cellData.getValue().toProperty()); mondayColumn.setCellValueFactory(cellData -> cellData.getValue().mondayProperty()); tuesdayColumn.setCellValueFactory(cellData -> cellData.getValue().tuesdayProperty()); wednesdayColumn.setCellValueFactory(cellData -> cellData.getValue().wednesdayProperty()); thursdayColumn.setCellValueFactory(cellData -> cellData.getValue().thursdayProperty()); fridayColumn.setCellValueFactory(cellData -> cellData.getValue().fridayProperty()); saturdayColumn.setCellValueFactory(cellData -> cellData.getValue().saturdayProperty()); sundayColumn.setCellValueFactory(cellData -> cellData.getValue().sundayProperty()); DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(Constants.HOURS_AND_MINUTES_FORMAT); LocalTimeStringConverter localTimeStringConverter = new LocalTimeStringConverter(dateTimeFormatter, dateTimeFormatter); DoubleStringConverter doubleStringConverter = new DoubleStringConverter(); usageColumn.setCellFactory(TextFieldTableCell.<UsageTimeSpan, Double>forTableColumn(doubleStringConverter)); usageColumn.setOnEditCommit((TableColumn.CellEditEvent<UsageTimeSpan, Double> t) -> { (t.getTableView().getItems().get(t.getTablePosition().getRow())).setUsage(t.getNewValue()); }); usageFromColumn.setCellFactory(TextFieldTableCell.<UsageTimeSpan, LocalTime>forTableColumn(localTimeStringConverter)); usageFromColumn.setOnEditCommit((TableColumn.CellEditEvent<UsageTimeSpan, LocalTime> t) -> { (t.getTableView().getItems().get(t.getTablePosition().getRow())).setFrom(t.getNewValue()); }); usageToColumn.setCellFactory(TextFieldTableCell.<UsageTimeSpan,LocalTime>forTableColumn(localTimeStringConverter)); usageToColumn.setOnEditCommit((TableColumn.CellEditEvent<UsageTimeSpan, LocalTime> 
t) -> { (t.getTableView().getItems().get(t.getTablePosition().getRow())).setTo(t.getNewValue()); }); mondayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(mondayColumn)); tuesdayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(tuesdayColumn)); wednesdayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(wednesdayColumn)); thursdayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(thursdayColumn)); fridayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(fridayColumn)); saturdayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(saturdayColumn)); sundayColumn.setCellFactory(CheckBoxTableCell.forTableColumn(sundayColumn)); daysOfTheWeekTabPane.getSelectionModel().selectedItemProperty().addListener( (observable, oldValue, newValue) -> updateChartData()); } public void setBuilding(Building building) { this.building = building; usageTable.setItems(building.getManualLoadProfileData()); updateChartData(); building.getManualLoadProfileData().addListener(new ListChangeListener<UsageTimeSpan>() { @Override public void onChanged(Change<? extends UsageTimeSpan> c) { updateChartData(); } }); } public void setDialogStage(Stage dialogStage) { this.dialogStage = dialogStage; } private void updateChartData() { building.calculateLoadProfile(); int day = daysOfTheWeekTabPane.getSelectionModel().getSelectedIndex(); List<Float> loadProfile = building.getLoadProfilesForWeek() != null && building.getLoadProfilesForWeek().size() > 0 ? 
building.getLoadProfilesForWeek().get(day): new ArrayList<>(); loadProfileChart.setAnimated(false); series.getData().clear(); loadProfileChart.setAnimated(true); for (int i = 0; i < loadProfile.size(); i+=30) { series.getData().add(new XYChart.Data<>(String.valueOf(i), loadProfile.get(i))); } } @FXML private void handleCreateTimeSpan() { UsageTimeSpan timeSpan = new UsageTimeSpan(0, LocalTime.ofSecondOfDay(0), LocalTime.ofSecondOfDay(0)); timeSpan.usageProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.fromProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.toProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.mondayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.tuesdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.wednesdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.thursdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.fridayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.saturdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); timeSpan.sundayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); usageTable.getItems().add(timeSpan); building.calculateLoadProfile(); } @FXML private void handleRemoveTimeSpan() { int index = usageTable.getSelectionModel().getSelectedIndex(); if (index >= 0) { usageTable.getItems().remove(index); building.calculateLoadProfile(); } } @FXML private void handleClose() { dialogStage.close(); } }
Animations while manually editing load profiles have been improved. Now only data on the chart that is affect by a change is animated instead of the entire chart being reset for each change.
src/com/projects/view/LoadProfileEditDialogController.java
Animations while manually editing load profiles have been improved. Now only data on the chart that is affect by a change is animated instead of the entire chart being reset for each change.
<ide><path>rc/com/projects/view/LoadProfileEditDialogController.java <ide> { <ide> this.building = building; <ide> usageTable.setItems(building.getManualLoadProfileData()); <del> updateChartData(); <add> initChartData(); <ide> <ide> building.getManualLoadProfileData().addListener(new ListChangeListener<UsageTimeSpan>() { <ide> @Override <ide> this.dialogStage = dialogStage; <ide> } <ide> <del> private void updateChartData() <add> private void initChartData() <ide> { <ide> building.calculateLoadProfile(); <ide> int day = daysOfTheWeekTabPane.getSelectionModel().getSelectedIndex(); <ide> series.getData().clear(); <ide> loadProfileChart.setAnimated(true); <ide> <del> for (int i = 0; i < loadProfile.size(); i+=30) <del> { <del> series.getData().add(new XYChart.Data<>(String.valueOf(i), loadProfile.get(i))); <add> for (int i = 0; i < Constants.MINUTES_IN_DAY; i+=30) <add> { <add> series.getData().add(new XYChart.Data<>(String.valueOf(i/30), loadProfile.get(i))); <add> } <add> } <add> <add> private void updateChartData() <add> { <add> building.calculateLoadProfile(); <add> int day = daysOfTheWeekTabPane.getSelectionModel().getSelectedIndex(); <add> <add> List<Float> loadProfile = building.getLoadProfilesForWeek() != null && building.getLoadProfilesForWeek().size() > 0 ? 
building.getLoadProfilesForWeek().get(day): new ArrayList<>(); <add> <add> for (int i = 0; i < series.getData().size(); ++i) <add> { <add> XYChart.Data<String, Float> data = series.getData().get(i); <add> <add> if (data.getYValue().floatValue() != loadProfile.get(i * 30).floatValue()) <add> series.getData().set(i, new XYChart.Data<>(String.valueOf(i), loadProfile.get(i * 30))); <ide> } <ide> } <ide> <ide> <ide> timeSpan.usageProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <ide> timeSpan.fromProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <del> timeSpan.toProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <add> timeSpan.toProperty().addListener((observable, oldValue, newValue) -> { <add> updateChartData(); <add> }); <ide> timeSpan.mondayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <ide> timeSpan.tuesdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <ide> timeSpan.wednesdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <ide> timeSpan.thursdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <ide> timeSpan.fridayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <ide> timeSpan.saturdayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <del> timeSpan.sundayProperty().addListener((observable, oldValue, newValue) -> {updateChartData();}); <add> timeSpan.sundayProperty().addListener((observable, oldValue, newValue) -> { <add> updateChartData(); <add> }); <ide> <ide> usageTable.getItems().add(timeSpan); <ide> building.calculateLoadProfile();
Java
mit
8cc12feb0e5dce60fdd1fecb6f29ea051e23a91f
0
rrbrambley/MessageBeast-Android
package com.alwaysallthetime.adnlibutils.manager; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.location.Address; import android.location.Geocoder; import android.util.Log; import com.alwaysallthetime.adnlib.Annotations; import com.alwaysallthetime.adnlib.AppDotNetClient; import com.alwaysallthetime.adnlib.QueryParameters; import com.alwaysallthetime.adnlib.data.Annotation; import com.alwaysallthetime.adnlib.data.File; import com.alwaysallthetime.adnlib.data.Message; import com.alwaysallthetime.adnlib.data.MessageList; import com.alwaysallthetime.adnlib.gson.AppDotNetGson; import com.alwaysallthetime.adnlib.response.MessageListResponseHandler; import com.alwaysallthetime.adnlib.response.MessageResponseHandler; import com.alwaysallthetime.adnlibutils.db.ADNDatabase; import com.alwaysallthetime.adnlibutils.db.DisplayLocationInstances; import com.alwaysallthetime.adnlibutils.db.HashtagInstances; import com.alwaysallthetime.adnlibutils.db.OrderedMessageBatch; import com.alwaysallthetime.adnlibutils.model.DisplayLocation; import com.alwaysallthetime.adnlibutils.model.Geolocation; import com.alwaysallthetime.adnlibutils.model.MessagePlus; import com.alwaysallthetime.asyncgeocoder.AsyncGeocoder; import com.alwaysallthetime.asyncgeocoder.response.AsyncGeocoderResponseHandler; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; public class MessageManager { private static final String TAG = "ADNLibUtils_MessageManager"; private static final int MAX_MESSAGES_RETURNED_ON_SYNC = 100; public static final String INTENT_ACTION_UNSENT_MESSAGES_SENT = "com.alwaysallthetime.adnlibutils.manager.MessageManager.intent.unsentMessagesSent"; public 
static final String EXTRA_CHANNEL_ID = "com.alwaysallthetime.adnlibutils.manager.MessageManager.extras.channelId"; public static final String EXTRA_SENT_MESSAGE_IDS = "com.alwaysallthetime.adnlibutils.manager.MessageManager.extras.sentMessageIds"; /* * public data structures */ public static abstract class MessageManagerResponseHandler { private boolean isMore; private List<String> sentMessageIds; public abstract void onSuccess(final List<MessagePlus> responseData, final boolean appended); public abstract void onError(Exception exception); void setIsMore(boolean isMore) { this.isMore = isMore; } public boolean isMore() { return this.isMore; } } public static abstract class MessageManagerSyncResponseHandler extends MessageManagerResponseHandler { private int numMessagesSynced; void setNumMessagesSynced(int numMessagesSynced) { this.numMessagesSynced = numMessagesSynced; } public int getNumMessagesSynced() { return numMessagesSynced; } } public interface MessageRefreshResponseHandler { public void onSuccess(final MessagePlus responseData); public void onError(Exception exception); } public interface MessageDeletionResponseHandler { public void onSuccess(); public void onError(Exception exception); } /** * A MessageDisplayDateAdapter can be used to return a date for which a Message should be * associated. This is most typically used when Message.getCreatedAt() should not be used * for sort order. 
*/ public interface MessageDisplayDateAdapter { public Date getDisplayDate(Message message); } private static MessageManager sInstance; private Context mContext; private ADNDatabase mDatabase; private AppDotNetClient mClient; private MessageManagerConfiguration mConfiguration; private HashMap<String, LinkedHashMap<String, MessagePlus>> mMessages; private HashMap<String, LinkedHashMap<String, MessagePlus>> mUnsentMessages; private HashMap<String, List<MessagePlus>> mPendingFiles; private HashMap<String, QueryParameters> mParameters; private HashMap<String, MinMaxPair> mMinMaxPairs; public static MessageManager getInstance() { return sInstance; } public static MessageManager init(Context context, AppDotNetClient client, MessageManagerConfiguration configuration) { sInstance = new MessageManager(context, client, configuration); return sInstance; } public MessageManager(Context context, AppDotNetClient client, MessageManagerConfiguration configuration) { mContext = context; mClient = client; mConfiguration = configuration; mDatabase = ADNDatabase.getInstance(mContext); mMessages = new HashMap<String, LinkedHashMap<String, MessagePlus>>(); mUnsentMessages = new HashMap<String, LinkedHashMap<String, MessagePlus>>(); mMinMaxPairs = new HashMap<String, MinMaxPair>(); mParameters = new HashMap<String, QueryParameters>(); mPendingFiles = new HashMap<String, List<MessagePlus>>(); IntentFilter intentFilter = new IntentFilter(FileUploadService.INTENT_ACTION_FILE_UPLOAD_COMPLETE); context.registerReceiver(fileUploadReceiver, intentFilter); } /** * Load persisted messages that were previously stored in the sqlite database. * * @param channelId the id of the channel for which messages should be loaded. * @param limit the maximum number of messages to load from the database. * @return a LinkedHashMap containing the newly loaded messages, mapped from message id * to Message Object. If no messages were loaded, then an empty Map is returned. 
* * @see com.alwaysallthetime.adnlibutils.manager.MessageManager.MessageManagerConfiguration#setDatabaseInsertionEnabled(boolean) */ public synchronized LinkedHashMap<String, MessagePlus> loadPersistedMessages(String channelId, int limit) { Date beforeDate = null; MinMaxPair minMaxPair = getMinMaxPair(channelId); if(minMaxPair.minId != null) { MessagePlus message = mMessages.get(channelId).get(minMaxPair.minId); beforeDate = message.getDisplayDate(); } OrderedMessageBatch orderedMessageBatch = mDatabase.getMessages(channelId, beforeDate, limit); LinkedHashMap<String, MessagePlus> messages = orderedMessageBatch.getMessages(); MinMaxPair dbMinMaxPair = orderedMessageBatch.getMinMaxPair(); minMaxPair = minMaxPair.combine(dbMinMaxPair); LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(channelId); if(channelMessages != null) { channelMessages.putAll(messages); } else { mMessages.put(channelId, messages); } mMinMaxPairs.put(channelId, minMaxPair); if(mConfiguration.isLocationLookupEnabled) { lookupLocation(messages.values(), false); } if(mConfiguration.isOEmbedLookupEnabled) { lookupOEmbed(messages.values(), false); } //this should always return only the newly loaded messages. 
return messages; } public LinkedHashMap<String, MessagePlus> loadPersistedMessagesTemporarily(String channelId, DisplayLocation location, ADNDatabase.LocationPrecision precision) { DisplayLocationInstances locationInstances = mDatabase.getDisplayLocationInstances(channelId, location, precision); return loadAndConfigureTemporaryMessages(channelId, locationInstances.getMessageIds()); } public LinkedHashMap<String, MessagePlus> loadPersistedMessagesTemporarily(String channelId, String hashtagName) { HashtagInstances hashtagInstances = mDatabase.getHashtagInstances(channelId, hashtagName); return loadAndConfigureTemporaryMessages(channelId, hashtagInstances.getMessageIds()); } private LinkedHashMap<String, MessagePlus> loadAndConfigureTemporaryMessages(String channelId, Collection<String> messageIds) { OrderedMessageBatch orderedMessageBatch = mDatabase.getMessages(channelId, messageIds); LinkedHashMap<String, MessagePlus> messages = orderedMessageBatch.getMessages(); if(mConfiguration.isLocationLookupEnabled) { lookupLocation(messages.values(), false); } if(mConfiguration.isOEmbedLookupEnabled) { lookupOEmbed(messages.values(), false); } return messages; } private void lookupOEmbed(Collection<MessagePlus> messages, boolean persistIfEnabled) { for(MessagePlus messagePlus : messages) { Message message = messagePlus.getMessage(); List<Annotation> oembeds = message.getAnnotationsOfType(Annotations.OEMBED); if(oembeds != null) { messagePlus.addOEmbedsFromAnnotations(oembeds); if(persistIfEnabled && mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceOEmbedInstances(messagePlus); } } } } private void lookupLocation(Collection<MessagePlus> messages, boolean persistIfEnabled) { for(MessagePlus messagePlus : messages) { Message message = messagePlus.getMessage(); Annotation checkin = message.getFirstAnnotationOfType(Annotations.CHECKIN); if(checkin != null) { messagePlus.setDisplayLocation(DisplayLocation.fromCheckinAnnotation(checkin)); if(persistIfEnabled 
&& mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceDisplayLocationInstance(messagePlus); } continue; } Annotation ohaiLocation = message.getFirstAnnotationOfType(Annotations.OHAI_LOCATION); if(ohaiLocation != null) { messagePlus.setDisplayLocation(DisplayLocation.fromOhaiLocation(ohaiLocation)); if(persistIfEnabled && mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceDisplayLocationInstance(messagePlus); } continue; } Annotation geoAnnotation = message.getFirstAnnotationOfType(Annotations.GEOLOCATION); if(geoAnnotation != null) { HashMap<String,Object> value = geoAnnotation.getValue(); final double latitude = (Double)value.get("latitude"); final double longitude = (Double)value.get("longitude"); Geolocation geolocationObj = mDatabase.getGeolocation(latitude, longitude); if(geolocationObj != null) { messagePlus.setDisplayLocation(DisplayLocation.fromGeolocation(geolocationObj)); //this might seem odd based on the fact that we just pulled the geolocation //from the database, but the point is to save the instance of this geolocation's //use - we might obtain a geolocation with this message's lat/long, but that //doesn't mean that this message + geolocation combo has been saved. //(this database lookup is merely an optimization to avoid having to fire off // the async task in reverseGeocode().) 
if(persistIfEnabled && mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceDisplayLocationInstance(messagePlus); } continue; } else { reverseGeocode(messagePlus, latitude, longitude, persistIfEnabled); } } } } private void reverseGeocode(final MessagePlus messagePlus, final double latitude, final double longitude, final boolean persistIfEnabled) { if(Geocoder.isPresent()) { AsyncGeocoder.getInstance(mContext).getFromLocation(latitude, longitude, 5, new AsyncGeocoderResponseHandler() { @Override public void onSuccess(final List<Address> addresses) { Geolocation geolocation = getGeoLocation(addresses, latitude, longitude); if(geolocation != null) { messagePlus.setDisplayLocation(DisplayLocation.fromGeolocation(geolocation)); if(persistIfEnabled && mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceGeolocation(geolocation); mDatabase.insertOrReplaceDisplayLocationInstance(messagePlus); } } if(mConfiguration.locationLookupHandler != null) { mConfiguration.locationLookupHandler.onSuccess(messagePlus); } } @Override public void onException(Exception exception) { Log.e(TAG, exception.getMessage(), exception); if(mConfiguration.locationLookupHandler != null) { mConfiguration.locationLookupHandler.onException(messagePlus, exception); } } }); } } public Map<String, MessagePlus> getMessageMap(String channelId) { return mMessages.get(channelId); } public AppDotNetClient getClient() { return mClient; } public List<MessagePlus> getMessageList(String channelId) { Map<String, MessagePlus> messageMap = mMessages.get(channelId); if(messageMap == null) { return null; } MessagePlus[] messages = messageMap.values().toArray(new MessagePlus[0]); return Arrays.asList(messages); } public void setParameters(String channelId, QueryParameters parameters) { mParameters.put(channelId, parameters); } private synchronized MinMaxPair getMinMaxPair(String channelId) { MinMaxPair minMaxPair = mMinMaxPairs.get(channelId); if(minMaxPair == null) { minMaxPair = new 
MinMaxPair(); mMinMaxPairs.put(channelId, minMaxPair); } return minMaxPair; } private synchronized LinkedHashMap<String, MessagePlus> getChannelMessages(String channelId) { LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(channelId); if(channelMessages == null) { channelMessages = new LinkedHashMap<String, MessagePlus>(10); mMessages.put(channelId, channelMessages); } return channelMessages; } private synchronized LinkedHashMap<String, MessagePlus> getUnsentMessages(String channelId) { LinkedHashMap<String, MessagePlus> unsentMessages = mUnsentMessages.get(channelId); if(unsentMessages == null) { unsentMessages = mDatabase.getUnsentMessages(channelId); mUnsentMessages.put(channelId, unsentMessages); } return unsentMessages; } private synchronized List<MessagePlus> getMessagesNeedingPendingFile(String pendingFileId) { List<MessagePlus> messagePlusses = mPendingFiles.get(pendingFileId); if(messagePlusses == null) { messagePlusses = new ArrayList<MessagePlus>(1); mPendingFiles.put(pendingFileId, messagePlusses); } return messagePlusses; } public synchronized void clearMessages(String channelId) { mMinMaxPairs.put(channelId, null); LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(channelId); if(channelMessages != null) { channelMessages.clear(); mDatabase.deleteMessages(channelId); } } public synchronized boolean retrieveMessages(final String channelId, final MessageManagerResponseHandler handler) { MinMaxPair minMaxPair = getMinMaxPair(channelId); return retrieveMessages(channelId, minMaxPair.maxId, minMaxPair.minId, handler); } public synchronized boolean retrieveNewestMessages(final String channelId, final MessageManagerResponseHandler handler) { return retrieveMessages(channelId, getMinMaxPair(channelId).maxId, null, handler); } public synchronized boolean retrieveMoreMessages(final String channelId, final MessageManagerResponseHandler handler) { return retrieveMessages(channelId, null, getMinMaxPair(channelId).minId, handler); } 
public synchronized void createMessage(final String channelId, final Message message, final MessageManagerResponseHandler handler) { if(getUnsentMessages(channelId).size() > 0) { throw new RuntimeException("This method should not be called when you have unsent messages."); } mClient.createMessage(channelId, message, new MessageResponseHandler() { @Override public void onSuccess(Message responseData) { //we finish this off by retrieving the newest messages in case we were missing any //that came before the one we just created. retrieveNewestMessages(channelId, handler); } @Override public void onError(Exception error) { super.onError(error); handler.onError(error); } }); } public synchronized MessagePlus createUnsentMessageAndAttemptSend(final String channelId, Message message) { return createUnsentMessageAndAttemptSend(channelId, message, new HashSet<String>(0)); } public synchronized MessagePlus createUnsentMessageAndAttemptSend(final String channelId, Message message, Set<String> pendingFileIds) { if(!mConfiguration.isDatabaseInsertionEnabled) { throw new RuntimeException("Database insertion must be enabled in order to use the unsent messages feature"); } //An unsent message id is always set to the max id + 1. // //This will work because we will never allow message retrieval to happen //until unsent messages are sent to the server and they get their "real" //message id. After they reach the server, we will delete them from existence //on the client and retrieve them from the server. // LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(channelId); if(channelMessages.size() == 0) { //we do this so that the max id is known. loadPersistedMessages(channelId, 1); } MinMaxPair minMaxPair = getMinMaxPair(channelId); Integer maxInteger = minMaxPair.getMaxAsInteger(); Integer newMessageId = maxInteger != null ? 
maxInteger + 1 : 1; String newMessageIdString = String.valueOf(newMessageId); MessagePlus.UnsentMessagePlusBuilder unsentBuilder = MessagePlus.UnsentMessagePlusBuilder.newBuilder(channelId, newMessageIdString, message); Iterator<String> iterator = pendingFileIds.iterator(); while(iterator.hasNext()) { unsentBuilder.addPendingOEmbed(iterator.next()); } final MessagePlus messagePlus = unsentBuilder.build(); mDatabase.insertOrReplaceMessage(messagePlus); //problem to solve - display locations need // if(mConfiguration.isLocationLookupEnabled) { // ArrayList<MessagePlus> mp = new ArrayList<MessagePlus>(1); // mp.add(messagePlus); // lookupLocation(mp, true); // } LinkedHashMap<String, MessagePlus> channelUnsentMessages = getUnsentMessages(channelId); channelUnsentMessages.put(newMessageIdString, messagePlus); LinkedHashMap<String, MessagePlus> newChannelMessages = new LinkedHashMap<String, MessagePlus>(channelMessages.size() + 1); newChannelMessages.put(messagePlus.getMessage().getId(), messagePlus); newChannelMessages.putAll(channelMessages); mMessages.put(channelId, newChannelMessages); minMaxPair.maxId = newMessageIdString; Log.d(TAG, "Created and stored unsent message with id " + newMessageIdString); sendUnsentMessages(channelId); return messagePlus; } public synchronized void deleteMessage(final MessagePlus messagePlus, final MessageDeletionResponseHandler handler) { if(messagePlus.isUnsent()) { Message message = messagePlus.getMessage(); String messageId = message.getId(); String channelId = message.getChannelId(); LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(channelId); mDatabase.deleteMessage(messagePlus); getUnsentMessages(channelId).remove(messageId); channelMessages.remove(messageId); MinMaxPair minMaxPair = getMinMaxPair(channelId); if(channelMessages.size() > 0) { minMaxPair.maxId = channelMessages.keySet().iterator().next(); } else { minMaxPair.maxId = null; } handler.onSuccess(); } else { 
mClient.deleteMessage(messagePlus.getMessage(), new MessageResponseHandler() { @Override public void onSuccess(Message responseData) { LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(responseData.getChannelId()); channelMessages.remove(responseData.getId()); mDatabase.deleteMessage(messagePlus); //this one because the deleted one doesn't have the entities. handler.onSuccess(); } @Override public void onError(Exception error) { super.onError(error); handler.onError(error); } }); } } public synchronized void refreshMessage(final Message message, final MessageRefreshResponseHandler handler) { final String channelId = message.getChannelId(); mClient.retrieveMessage(channelId, message.getId(), mParameters.get(channelId), new MessageResponseHandler() { @Override public void onSuccess(Message responseData) { MessagePlus mPlus = new MessagePlus(responseData); mPlus.setDisplayDate(getAdjustedDate(responseData)); LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(channelId); if(channelMessages != null) { //could be null of channel messages weren't loaded first, etc. channelMessages.put(responseData.getId(), mPlus); } if(mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceMessage(mPlus); } handler.onSuccess(mPlus); } @Override public void onError(Exception error) { super.onError(error); handler.onError(error); } }); } /** * Sync and persist all Messages in a Channel. * * This is intended to be used as a one-time sync, e.g. after a user signs in. For this reason, * it is required that your MessageManagerConfiguration has its isDatabaseInsertionEnabled property * set to true. * * Because this could potentially result in a very large amount of Messages being obtained, * the provided MessageManagerResponseHandler will only be passed the first 100 Messages that are * obtained, while the others will be persisted to the sqlite database, but not kept in memory. 
* However, these can easily be loaded into memory afterwards by calling loadPersistedMessages(). * * @param channelId The id of the Channel from which to obtain Messages. * @param responseHandler MessageManagerResponseHandler * * @see com.alwaysallthetime.adnlibutils.manager.MessageManager.MessageManagerConfiguration#setDatabaseInsertionEnabled(boolean) * @see MessageManager#loadPersistedMessages(String, int) */ public synchronized void retrieveAndPersistAllMessages(String channelId, MessageManagerSyncResponseHandler responseHandler) { if(!mConfiguration.isDatabaseInsertionEnabled) { throw new RuntimeException("Database insertion must be enabled to use this functionality."); } final ArrayList<MessagePlus> messages = new ArrayList<MessagePlus>(MAX_MESSAGES_RETURNED_ON_SYNC); String sinceId = null; String beforeId = null; retrieveAllMessages(messages, sinceId, beforeId, channelId, responseHandler); } private synchronized void retrieveAllMessages(final ArrayList<MessagePlus> messages, String sinceId, String beforeId, final String channelId, final MessageManagerSyncResponseHandler responseHandler) { QueryParameters params = (QueryParameters) mParameters.get(channelId).clone(); params.put("since_id", sinceId); params.put("before_id", beforeId); params.put("count", String.valueOf(MAX_MESSAGES_RETURNED_ON_SYNC)); retrieveMessages(params, channelId, new MessageManagerResponseHandler() { @Override public void onSuccess(List<MessagePlus> responseData, boolean appended) { if(messages.size() == 0) { messages.addAll(responseData); } responseHandler.setNumMessagesSynced(responseHandler.getNumMessagesSynced() + responseData.size()); if(isMore()) { MinMaxPair minMaxPair = getMinMaxPair(channelId); retrieveAllMessages(messages, null, minMaxPair.minId, channelId, responseHandler); } else { Log.d(TAG, "Num messages synced: " + responseHandler.getNumMessagesSynced()); responseHandler.onSuccess(messages, true); } } @Override public void onError(Exception exception) { Log.e(TAG, 
exception.getMessage(), exception); responseHandler.onError(exception); } }); } private synchronized boolean retrieveMessages(final String channelId, final String sinceId, final String beforeId, final MessageManagerResponseHandler handler) { QueryParameters params = (QueryParameters) mParameters.get(channelId).clone(); params.put("since_id", sinceId); params.put("before_id", beforeId); return retrieveMessages(params, channelId, handler); } private synchronized void sendUnsentMessages(final LinkedHashMap<String, MessagePlus> unsentMessages, final ArrayList<String> sentMessageIds) { final MessagePlus messagePlus = unsentMessages.get(unsentMessages.keySet().iterator().next()); if(messagePlus.hasPendingOEmbeds()) { String pendingFileId = messagePlus.getPendingOEmbeds().iterator().next(); List<MessagePlus> messagesNeedingPendingFile = getMessagesNeedingPendingFile(pendingFileId); messagesNeedingPendingFile.add(messagePlus); //TODO: this should somehow be prepopulated? FileManager.getInstance(mContext, mClient).startPendingFileUpload(pendingFileId); return; } final Message message = messagePlus.getMessage(); //we had them set for display locally, but we should //let the server generate the "real" entities. message.setEntities(null); mClient.createMessage(message.getChannelId(), message, new MessageResponseHandler() { @Override public void onSuccess(Message responseData) { Log.d(TAG, "Successfully sent unsent message with id " + message.getId()); unsentMessages.remove(message.getId()); sentMessageIds.add(message.getId()); mDatabase.deleteMessage(messagePlus); //remove the message from in-memory message map. 
LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(message.getChannelId()); channelMessages.remove(message.getId()); MinMaxPair minMaxPair = getMinMaxPair(message.getChannelId()); if(unsentMessages.size() > 0) { String nextId = unsentMessages.keySet().iterator().next(); minMaxPair.maxId = nextId; sendUnsentMessages(unsentMessages, sentMessageIds); } else { if(channelMessages.size() > 0) { //step back in time until we find the first message that was NOT one //of the unsent messages. this will be the max id. String nextMaxId = null; Iterator<String> channelMessagesIterator = channelMessages.keySet().iterator(); while(channelMessagesIterator.hasNext()) { String next = channelMessagesIterator.next(); if(!sentMessageIds.contains(next)) { minMaxPair.maxId = next; break; } } } else { minMaxPair.maxId = null; } Intent i = new Intent(INTENT_ACTION_UNSENT_MESSAGES_SENT); i.putExtra(EXTRA_CHANNEL_ID, message.getChannelId()); i.putStringArrayListExtra(EXTRA_SENT_MESSAGE_IDS, sentMessageIds); mContext.sendBroadcast(i); } } @Override public void onError(Exception exception) { super.onError(exception); } }); } public synchronized void sendUnsentMessages(final String channelId) { LinkedHashMap<String, MessagePlus> unsentMessages = getUnsentMessages(channelId); if(unsentMessages.size() > 0) { LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(channelId); if(channelMessages.size() == 0) { //we do this so that the max id is known. 
loadPersistedMessages(channelId, unsentMessages.size() + 1); } ArrayList<String> sentMessageIds = new ArrayList<String>(unsentMessages.size()); sendUnsentMessages(unsentMessages, sentMessageIds); } } private synchronized boolean retrieveMessages(final QueryParameters queryParameters, final String channelId, final MessageManagerResponseHandler handler) { LinkedHashMap<String, MessagePlus> unsentMessages = getUnsentMessages(channelId); if(unsentMessages.size() > 0) { return false; } mClient.retrieveMessagesInChannel(channelId, queryParameters, new MessageListResponseHandler() { @Override public void onSuccess(final MessageList responseData) { boolean appended = true; String beforeId = queryParameters.get("before_id"); String sinceId = queryParameters.get("since_id"); MinMaxPair minMaxPair = getMinMaxPair(channelId); if(beforeId != null && sinceId == null) { String newMinId = getMinId(); if(newMinId != null) { minMaxPair.minId = newMinId; } } else if(beforeId == null && sinceId != null) { appended = false; String newMaxId = getMaxId(); if(newMaxId != null) { minMaxPair.maxId = newMaxId; } } else if(beforeId == null && sinceId == null) { minMaxPair.minId = getMinId(); minMaxPair.maxId = getMaxId(); } LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(channelId); ArrayList<MessagePlus> newestMessages = new ArrayList<MessagePlus>(responseData.size()); LinkedHashMap<String, MessagePlus> newFullChannelMessagesMap = new LinkedHashMap<String, MessagePlus>(channelMessages.size() + responseData.size()); if(appended) { newFullChannelMessagesMap.putAll(channelMessages); } for(Message m : responseData) { MessagePlus mPlus = new MessagePlus(m); newestMessages.add(mPlus); adjustDateAndInsert(mPlus); newFullChannelMessagesMap.put(m.getId(), mPlus); } if(!appended) { newFullChannelMessagesMap.putAll(channelMessages); } mMessages.put(channelId, newFullChannelMessagesMap); if(mConfiguration.isLocationLookupEnabled) { lookupLocation(newestMessages, true); } 
if(mConfiguration.isOEmbedLookupEnabled) { lookupOEmbed(newestMessages, true); } if(handler != null) { handler.setIsMore(isMore()); handler.onSuccess(newestMessages, appended); } } @Override public void onError(Exception error) { Log.d(TAG, error.getMessage(), error); if(handler != null) { handler.onError(error); } } }); return true; } private void adjustDateAndInsert(MessagePlus mPlus) { Date adjustedDate = getAdjustedDate(mPlus.getMessage()); mPlus.setDisplayDate(adjustedDate); if(mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceMessage(mPlus); mDatabase.insertOrReplaceHashtagInstances(mPlus); } } private Date getAdjustedDate(Message message) { return mConfiguration.dateAdapter == null ? message.getCreatedAt() : mConfiguration.dateAdapter.getDisplayDate(message); } private Geolocation getGeoLocation(List<Address> addresses, double latitude, double longitude) { String locality = null; String subLocality = null; for(Address address : addresses) { if(subLocality == null) { subLocality = address.getSubLocality(); } if(subLocality != null || locality == null) { locality = address.getLocality(); } if(subLocality != null && locality != null) { break; } } if(subLocality != null && locality != null) { return new Geolocation(locality, subLocality, latitude, longitude); } return null; } private final BroadcastReceiver fileUploadReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { if(FileUploadService.INTENT_ACTION_FILE_UPLOAD_COMPLETE.equals(intent.getAction())) { String pendingFileId = intent.getStringExtra(FileUploadService.EXTRA_PENDING_FILE_ID); if(pendingFileId != null) { boolean success = intent.getBooleanExtra(FileUploadService.EXTRA_SUCCESS, false); if(success) { Log.d(TAG, "Successfully uploaded pending file with id " + pendingFileId); List<MessagePlus> messagesNeedingFile = mPendingFiles.get(pendingFileId); if(messagesNeedingFile != null) { HashSet<String> channelIdsWithMessagesToSend = new 
HashSet<String>(); String fileJson = intent.getStringExtra(FileUploadService.EXTRA_FILE); File file = AppDotNetGson.getPersistenceInstance().fromJson(fileJson, File.class); for(MessagePlus messagePlus : messagesNeedingFile) { Message message = messagePlus.getMessage(); messagePlus.replacePendingOEmbedWithOEmbedAnnotation(pendingFileId, file); mDatabase.insertOrReplaceMessage(messagePlus); mDatabase.deletePendingOEmbed(pendingFileId, message.getId(), message.getChannelId()); if(messagePlus.getPendingOEmbeds().size() == 0) { channelIdsWithMessagesToSend.add(message.getChannelId()); } } for(String channelId : channelIdsWithMessagesToSend) { sendUnsentMessages(channelId); Log.d(TAG, "Now retrying send for unsent messages in channel " + channelId); } } } else { //TODO Log.e(TAG, "Failed to upload pending file with id " + pendingFileId); } } } } }; public static class MessageManagerConfiguration { public static interface MessageLocationLookupHandler { public void onSuccess(MessagePlus messagePlus); public void onException(MessagePlus messagePlus, Exception exception); } boolean isDatabaseInsertionEnabled; boolean isOEmbedLookupEnabled; boolean isLocationLookupEnabled; MessageDisplayDateAdapter dateAdapter; MessageLocationLookupHandler locationLookupHandler; /** * Enable or disable automatic insertion of Messages into a sqlite database * upon retrieval. By default, this feature is turned off. * * @param isEnabled true if all retrieved Messages should be stashed in a sqlite * database, false otherwise. */ public void setDatabaseInsertionEnabled(boolean isEnabled) { this.isDatabaseInsertionEnabled = isEnabled; } /** * Set a MessageDisplayDateAdapter. * * @param adapter * @see com.alwaysallthetime.adnlibutils.manager.MessageManager.MessageDisplayDateAdapter */ public void setMessageDisplayDateAdapter(MessageDisplayDateAdapter adapter) { this.dateAdapter = adapter; } /** * Enable OEmbed lookup on Messages. 
If enabled, annotations will be examined in order to * determine if OEmbed photo or video annotations are present. The associated MessagePlus * will then have these OEmbed Objects obtainable via convenience methods. * * This is especially useful when database insertion is enabled – instances of photo and * video OEmbeds will be stored in a table for look up at a later time (e.g. "gimme all * messages for which there are photos attached"). * * @param isEnabled */ public void setOEmbedLookupEnabled(boolean isEnabled) { this.isOEmbedLookupEnabled = isEnabled; } /** * Enable location lookup on Messages. If enabled, annotations will be examined in order * to construct a DisplayLocation. A DisplayLocation will be set on the associated MessagePlus * Object, based off one of these three annotations, if they exist: * * net.app.core.checkin * net.app.ohai.location * net.app.core.geolocation * * In the case of net.app.core.geolocation, an asynchronous task will be fired off to * perform reverse geolocation on the latitude/longitude coordinates. For this reason, you * should set a MessageLocationLookupHandler on this configuration if you want to perform * a task such as update UI after a location is obtained. * * If none of these annotations are found, then a null DisplayLocation is set on the * associated MessagePlus. * * @param isEnabled true if location lookup should be performed on all Messages * * @see com.alwaysallthetime.adnlibutils.model.MessagePlus#getDisplayLocation() * @see com.alwaysallthetime.adnlibutils.model.MessagePlus#hasSetDisplayLocation() * @see com.alwaysallthetime.adnlibutils.model.MessagePlus#hasDisplayLocation() */ public void setLocationLookupEnabled(boolean isEnabled) { this.isLocationLookupEnabled = isEnabled; } /** * Specify a handler to be notified when location lookup has completed for a MessagePlus. * This is particularly useful when a geolocation annotation requires an asynchronous * reverse geocoding task. 
* * @param handler */ public void setLocationLookupHandler(MessageLocationLookupHandler handler) { this.locationLookupHandler = handler; } } }
src/main/java/com/alwaysallthetime/adnlibutils/manager/MessageManager.java
package com.alwaysallthetime.adnlibutils.manager; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.location.Address; import android.location.Geocoder; import android.util.Log; import com.alwaysallthetime.adnlib.Annotations; import com.alwaysallthetime.adnlib.AppDotNetClient; import com.alwaysallthetime.adnlib.QueryParameters; import com.alwaysallthetime.adnlib.data.Annotation; import com.alwaysallthetime.adnlib.data.File; import com.alwaysallthetime.adnlib.data.Message; import com.alwaysallthetime.adnlib.data.MessageList; import com.alwaysallthetime.adnlib.gson.AppDotNetGson; import com.alwaysallthetime.adnlib.response.MessageListResponseHandler; import com.alwaysallthetime.adnlib.response.MessageResponseHandler; import com.alwaysallthetime.adnlibutils.db.ADNDatabase; import com.alwaysallthetime.adnlibutils.db.DisplayLocationInstances; import com.alwaysallthetime.adnlibutils.db.HashtagInstances; import com.alwaysallthetime.adnlibutils.db.OrderedMessageBatch; import com.alwaysallthetime.adnlibutils.model.DisplayLocation; import com.alwaysallthetime.adnlibutils.model.Geolocation; import com.alwaysallthetime.adnlibutils.model.MessagePlus; import com.alwaysallthetime.asyncgeocoder.AsyncGeocoder; import com.alwaysallthetime.asyncgeocoder.response.AsyncGeocoderResponseHandler; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; public class MessageManager { private static final String TAG = "ADNLibUtils_MessageManager"; private static final int MAX_MESSAGES_RETURNED_ON_SYNC = 100; /* * public data structures */ public static abstract class MessageManagerResponseHandler { private boolean isMore; private 
List<String> sentMessageIds; public abstract void onSuccess(final List<MessagePlus> responseData, final boolean appended); public abstract void onError(Exception exception); void setIsMore(boolean isMore) { this.isMore = isMore; } public boolean isMore() { return this.isMore; } void setSentMessageIds(List<String> sentMessageIds) { this.sentMessageIds = sentMessageIds; } public List<String> getSentMessageIds() { return this.sentMessageIds; } } public static abstract class MessageManagerSyncResponseHandler extends MessageManagerResponseHandler { private int numMessagesSynced; void setNumMessagesSynced(int numMessagesSynced) { this.numMessagesSynced = numMessagesSynced; } public int getNumMessagesSynced() { return numMessagesSynced; } } public interface MessageManagerSendUnsentMessagesHandler { public void onSuccess(List<String> sentMessageIds); public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus); public void onError(Exception exception, List<String> sentMessageIds); } public interface MessageRefreshResponseHandler { public void onSuccess(final MessagePlus responseData); public void onError(Exception exception); } public interface MessageDeletionResponseHandler { public void onSuccess(); public void onError(Exception exception); } /** * A MessageDisplayDateAdapter can be used to return a date for which a Message should be * associated. This is most typically used when Message.getCreatedAt() should not be used * for sort order. 
*/ public interface MessageDisplayDateAdapter { public Date getDisplayDate(Message message); } private static MessageManager sInstance; private Context mContext; private ADNDatabase mDatabase; private AppDotNetClient mClient; private MessageManagerConfiguration mConfiguration; private HashMap<String, LinkedHashMap<String, MessagePlus>> mMessages; private HashMap<String, LinkedHashMap<String, MessagePlus>> mUnsentMessages; private HashMap<String, List<MessagePlus>> mPendingFiles; private HashMap<String, QueryParameters> mParameters; private HashMap<String, MinMaxPair> mMinMaxPairs; public static MessageManager getInstance() { return sInstance; } public static MessageManager init(Context context, AppDotNetClient client, MessageManagerConfiguration configuration) { sInstance = new MessageManager(context, client, configuration); return sInstance; } public MessageManager(Context context, AppDotNetClient client, MessageManagerConfiguration configuration) { mContext = context; mClient = client; mConfiguration = configuration; mDatabase = ADNDatabase.getInstance(mContext); mMessages = new HashMap<String, LinkedHashMap<String, MessagePlus>>(); mUnsentMessages = new HashMap<String, LinkedHashMap<String, MessagePlus>>(); mMinMaxPairs = new HashMap<String, MinMaxPair>(); mParameters = new HashMap<String, QueryParameters>(); mPendingFiles = new HashMap<String, List<MessagePlus>>(); IntentFilter intentFilter = new IntentFilter(FileUploadService.INTENT_ACTION_FILE_UPLOAD_COMPLETE); context.registerReceiver(fileUploadReceiver, intentFilter); } /** * Load persisted messages that were previously stored in the sqlite database. * * @param channelId the id of the channel for which messages should be loaded. * @param limit the maximum number of messages to load from the database. * @return a LinkedHashMap containing the newly loaded messages, mapped from message id * to Message Object. If no messages were loaded, then an empty Map is returned. 
* * @see com.alwaysallthetime.adnlibutils.manager.MessageManager.MessageManagerConfiguration#setDatabaseInsertionEnabled(boolean) */ public synchronized LinkedHashMap<String, MessagePlus> loadPersistedMessages(String channelId, int limit) { Date beforeDate = null; MinMaxPair minMaxPair = getMinMaxPair(channelId); if(minMaxPair.minId != null) { MessagePlus message = mMessages.get(channelId).get(minMaxPair.minId); beforeDate = message.getDisplayDate(); } OrderedMessageBatch orderedMessageBatch = mDatabase.getMessages(channelId, beforeDate, limit); LinkedHashMap<String, MessagePlus> messages = orderedMessageBatch.getMessages(); MinMaxPair dbMinMaxPair = orderedMessageBatch.getMinMaxPair(); minMaxPair = minMaxPair.combine(dbMinMaxPair); LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(channelId); if(channelMessages != null) { channelMessages.putAll(messages); } else { mMessages.put(channelId, messages); } mMinMaxPairs.put(channelId, minMaxPair); if(mConfiguration.isLocationLookupEnabled) { lookupLocation(messages.values(), false); } if(mConfiguration.isOEmbedLookupEnabled) { lookupOEmbed(messages.values(), false); } //this should always return only the newly loaded messages. 
return messages; } public LinkedHashMap<String, MessagePlus> loadPersistedMessagesTemporarily(String channelId, DisplayLocation location, ADNDatabase.LocationPrecision precision) { DisplayLocationInstances locationInstances = mDatabase.getDisplayLocationInstances(channelId, location, precision); return loadAndConfigureTemporaryMessages(channelId, locationInstances.getMessageIds()); } public LinkedHashMap<String, MessagePlus> loadPersistedMessagesTemporarily(String channelId, String hashtagName) { HashtagInstances hashtagInstances = mDatabase.getHashtagInstances(channelId, hashtagName); return loadAndConfigureTemporaryMessages(channelId, hashtagInstances.getMessageIds()); } private LinkedHashMap<String, MessagePlus> loadAndConfigureTemporaryMessages(String channelId, Collection<String> messageIds) { OrderedMessageBatch orderedMessageBatch = mDatabase.getMessages(channelId, messageIds); LinkedHashMap<String, MessagePlus> messages = orderedMessageBatch.getMessages(); if(mConfiguration.isLocationLookupEnabled) { lookupLocation(messages.values(), false); } if(mConfiguration.isOEmbedLookupEnabled) { lookupOEmbed(messages.values(), false); } return messages; } private void lookupOEmbed(Collection<MessagePlus> messages, boolean persistIfEnabled) { for(MessagePlus messagePlus : messages) { Message message = messagePlus.getMessage(); List<Annotation> oembeds = message.getAnnotationsOfType(Annotations.OEMBED); if(oembeds != null) { messagePlus.addOEmbedsFromAnnotations(oembeds); if(persistIfEnabled && mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceOEmbedInstances(messagePlus); } } } } private void lookupLocation(Collection<MessagePlus> messages, boolean persistIfEnabled) { for(MessagePlus messagePlus : messages) { Message message = messagePlus.getMessage(); Annotation checkin = message.getFirstAnnotationOfType(Annotations.CHECKIN); if(checkin != null) { messagePlus.setDisplayLocation(DisplayLocation.fromCheckinAnnotation(checkin)); if(persistIfEnabled 
&& mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceDisplayLocationInstance(messagePlus); } continue; } Annotation ohaiLocation = message.getFirstAnnotationOfType(Annotations.OHAI_LOCATION); if(ohaiLocation != null) { messagePlus.setDisplayLocation(DisplayLocation.fromOhaiLocation(ohaiLocation)); if(persistIfEnabled && mConfiguration.isDatabaseInsertionEnabled) { mDatabase.insertOrReplaceDisplayLocationInstance(messagePlus); } continue; } Annotation geoAnnotation = message.getFirstAnnotationOfType(Annotations.GEOLOCATION); if(geoAnnotation != null) { HashMap<String,Object> value = geoAnnotation.getValue(); final double latitude = (Double)value.get("latitude"); final double longitude = (Double)value.get("longitude"); Geolocation geolocationObj = mDatabase.getGeolocation(latitude, longitude); if(geolocationObj != null) { messagePlus.setDisplayLocation(DisplayLocation.fromGeolocation(geolocationObj)); //this might seem odd based on the fact that we just pulled the geolocation //from the database, but the point is to save the instance of this geolocation's //use - we might obtain a geolocation with this message's lat/long, but that //doesn't mean that this message + geolocation combo has been saved. //(this database lookup is merely an optimization to avoid having to fire off // the async task in reverseGeocode().) 
// NOTE(review): the lines below are the tail of a location-lookup method whose
// opening lines fall outside this chunk; its tokens are preserved exactly.
                if(persistIfEnabled && mConfiguration.isDatabaseInsertionEnabled) {
                    mDatabase.insertOrReplaceDisplayLocationInstance(messagePlus);
                }
                continue;
            } else {
                // No display location could be built from the annotation alone;
                // fall back to asynchronous reverse geocoding of the coordinates.
                reverseGeocode(messagePlus, latitude, longitude, persistIfEnabled);
            }
        }
    }
}

// Asynchronously reverse-geocodes the given coordinates (requesting up to 5
// candidate addresses) and, on success, sets a DisplayLocation on the
// MessagePlus. Optionally persists the Geolocation and display-location
// instance when database insertion is enabled. The configured
// locationLookupHandler (if any) is notified on success or failure.
// Does nothing when no Geocoder implementation is present on the device.
private void reverseGeocode(final MessagePlus messagePlus, final double latitude, final double longitude, final boolean persistIfEnabled) {
    if(Geocoder.isPresent()) {
        AsyncGeocoder.getInstance(mContext).getFromLocation(latitude, longitude, 5, new AsyncGeocoderResponseHandler() {
            @Override
            public void onSuccess(final List<Address> addresses) {
                Geolocation geolocation = getGeoLocation(addresses, latitude, longitude);
                if(geolocation != null) {
                    messagePlus.setDisplayLocation(DisplayLocation.fromGeolocation(geolocation));
                    if(persistIfEnabled && mConfiguration.isDatabaseInsertionEnabled) {
                        mDatabase.insertOrReplaceGeolocation(geolocation);
                        mDatabase.insertOrReplaceDisplayLocationInstance(messagePlus);
                    }
                }
                if(mConfiguration.locationLookupHandler != null) {
                    mConfiguration.locationLookupHandler.onSuccess(messagePlus);
                }
            }

            @Override
            public void onException(Exception exception) {
                Log.e(TAG, exception.getMessage(), exception);
                if(mConfiguration.locationLookupHandler != null) {
                    mConfiguration.locationLookupHandler.onException(messagePlus, exception);
                }
            }
        });
    }
}

// Returns the in-memory id -> MessagePlus map for the given channel, or null
// if no messages have been loaded for it.
public Map<String, MessagePlus> getMessageMap(String channelId) {
    return mMessages.get(channelId);
}

// Returns the AppDotNetClient this manager uses for all server communication.
public AppDotNetClient getClient() {
    return mClient;
}

// Returns the in-memory messages for the channel as a List (in the map's
// iteration order), or null if no messages have been loaded for the channel.
public List<MessagePlus> getMessageList(String channelId) {
    Map<String, MessagePlus> messageMap = mMessages.get(channelId);
    if(messageMap == null) {
        return null;
    }
    MessagePlus[] messages = messageMap.values().toArray(new MessagePlus[0]);
    return Arrays.asList(messages);
}

// Associates the QueryParameters to use for subsequent retrievals in the
// given channel.
public void setParameters(String channelId, QueryParameters parameters) {
    mParameters.put(channelId, parameters);
}

// Lazily creates and caches the MinMaxPair (known min/max message ids) for
// the channel.
private synchronized MinMaxPair getMinMaxPair(String channelId) {
    MinMaxPair minMaxPair = mMinMaxPairs.get(channelId);
    if(minMaxPair == null) {
        minMaxPair = new MinMaxPair();
        mMinMaxPairs.put(channelId, minMaxPair);
    }
    return minMaxPair;
}

// Lazily creates and caches the in-memory message map for the channel.
private synchronized LinkedHashMap<String, MessagePlus> getChannelMessages(String channelId) {
    LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(channelId);
    if(channelMessages == null) {
        channelMessages = new LinkedHashMap<String, MessagePlus>(10);
        mMessages.put(channelId, channelMessages);
    }
    return channelMessages;
}

// Lazily loads (from the database) and caches the unsent-message map for the
// channel.
private synchronized LinkedHashMap<String, MessagePlus> getUnsentMessages(String channelId) {
    LinkedHashMap<String, MessagePlus> unsentMessages = mUnsentMessages.get(channelId);
    if(unsentMessages == null) {
        unsentMessages = mDatabase.getUnsentMessages(channelId);
        mUnsentMessages.put(channelId, unsentMessages);
    }
    return unsentMessages;
}

// Lazily creates and caches the list of Messages that are waiting on the
// upload of the given pending file.
private synchronized List<MessagePlus> getMessagesNeedingPendingFile(String pendingFileId) {
    List<MessagePlus> messagePlusses = mPendingFiles.get(pendingFileId);
    if(messagePlusses == null) {
        messagePlusses = new ArrayList<MessagePlus>(1);
        mPendingFiles.put(pendingFileId, messagePlusses);
    }
    return messagePlusses;
}

// Clears all in-memory and persisted messages for the channel and resets its
// min/max id bookkeeping.
public synchronized void clearMessages(String channelId) {
    mMinMaxPairs.put(channelId, null);
    LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(channelId);
    if(channelMessages != null) {
        channelMessages.clear();
        mDatabase.deleteMessages(channelId);
    }
}

// Flushes any unsent messages for the channel, then retrieves messages using
// the channel's current max/min ids (i.e. both newer and older messages).
public synchronized void retrieveMessages(final String channelId, final MessageManagerResponseHandler handler) {
    sendUnsentMessages(channelId, new MessageManagerSendUnsentMessagesHandler() {
        @Override
        public void onSuccess(final List<String> sentMessageIds) {
            MinMaxPair minMaxPair = getMinMaxPair(channelId);
            retrieveMessages(channelId, minMaxPair.maxId, minMaxPair.minId, sentMessageIds, handler);
        }

        @Override
        public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus) {
            Log.d(TAG, "began upload of pending file " + pendingFileId + " for message with id " + messagePlus.getMessage().getId());
        }

        @Override
        public void onError(Exception exception, List<String> sentMessageIds) {
            //TODO: handle the sent messages in this case?
            Log.e(TAG, exception.getMessage(), exception);
            handler.onError(exception);
        }
    });
}

// Flushes any unsent messages, then retrieves only messages newer than the
// channel's current max id.
public synchronized void retrieveNewestMessages(final String channelId, final MessageManagerResponseHandler handler) {
    sendUnsentMessages(channelId, new MessageManagerSendUnsentMessagesHandler() {
        @Override
        public void onSuccess(final List<String> sentMessageIds) {
            retrieveMessages(channelId, getMinMaxPair(channelId).maxId, null, sentMessageIds, handler);
        }

        @Override
        public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus) {
            Log.d(TAG, "began upload of pending file " + pendingFileId + " for message with id " + messagePlus.getMessage().getId());
        }

        @Override
        public void onError(Exception exception, List<String> sentMessageIds) {
            //TODO: handle the sent messages in this case?
            Log.e(TAG, exception.getMessage(), exception);
            handler.onError(exception);
        }
    });
}

// Flushes any unsent messages, then retrieves only messages older than the
// channel's current min id.
public synchronized void retrieveMoreMessages(final String channelId, final MessageManagerResponseHandler handler) {
    sendUnsentMessages(channelId, new MessageManagerSendUnsentMessagesHandler() {
        @Override
        public void onSuccess(final List<String> sentMessageIds) {
            retrieveMessages(channelId, null, getMinMaxPair(channelId).minId, sentMessageIds, handler);
        }

        @Override
        public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus) {
            Log.d(TAG, "began upload of pending file " + pendingFileId + " for message with id " + messagePlus.getMessage().getId());
        }

        @Override
        public void onError(Exception exception, List<String> sentMessageIds) {
            //TODO: handle the sent messages in this case?
            Log.e(TAG, exception.getMessage(), exception);
            handler.onError(exception);
        }
    });
}

// Creates a Message on the server directly (no unsent queueing). Throws if
// unsent messages exist for the channel, since they must be flushed first to
// keep the local id bookkeeping consistent.
public synchronized void createMessage(final String channelId, final Message message, final MessageManagerResponseHandler handler) {
    if(getUnsentMessages(channelId).size() > 0) {
        throw new RuntimeException("This method should not be called when you have unsent messages.");
    }
    mClient.createMessage(channelId, message, new MessageResponseHandler() {
        @Override
        public void onSuccess(Message responseData) {
            //we finish this off by retrieving the newest messages in case we were missing any
            //that came before the one we just created.
            retrieveNewestMessages(channelId, handler);
        }

        @Override
        public void onError(Exception error) {
            super.onError(error);
            handler.onError(error);
        }
    });
}

// Convenience overload: no pending files, no handler.
public synchronized MessagePlus createUnsentMessageAndAttemptSend(final String channelId, Message message) {
    return createUnsentMessageAndAttemptSend(channelId, message, new HashSet<String>(0), null);
}

// Convenience overload: no pending files.
public synchronized MessagePlus createUnsentMessageAndAttemptSend(final String channelId, Message message, final MessageManagerResponseHandler handler) {
    return createUnsentMessageAndAttemptSend(channelId, message, new HashSet<String>(0), handler);
}

// Builds an unsent MessagePlus with a provisional id (current max id + 1),
// persists it, prepends it to the in-memory channel map, then attempts to
// send all unsent messages for the channel. Requires database insertion to
// be enabled. Returns the provisional MessagePlus immediately; the handler
// is invoked after the send + refresh completes (or fails).
public synchronized MessagePlus createUnsentMessageAndAttemptSend(final String channelId, Message message, Set<String> pendingFileIds, final MessageManagerResponseHandler handler) {
    if(!mConfiguration.isDatabaseInsertionEnabled) {
        throw new RuntimeException("Database insertion must be enabled in order to use the unsent messages feature");
    }

    //An unsent message id is always set to the max id + 1.
    //
    //This will work because we will never allow message retrieval to happen
    //until unsent messages are sent to the server and they get their "real"
    //message id. After they reach the server, we will delete them from existence
    //on the client and retrieve them from the server.
    //
    LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(channelId);
    if(channelMessages.size() == 0) {
        //we do this so that the max id is known.
        loadPersistedMessages(channelId, 1);
    }
    MinMaxPair minMaxPair = getMinMaxPair(channelId);
    Integer maxInteger = minMaxPair.getMaxAsInteger();
    Integer newMessageId = maxInteger != null ? maxInteger + 1 : 1;
    String newMessageIdString = String.valueOf(newMessageId);

    MessagePlus.UnsentMessagePlusBuilder unsentBuilder = MessagePlus.UnsentMessagePlusBuilder.newBuilder(channelId, newMessageIdString, message);
    Iterator<String> iterator = pendingFileIds.iterator();
    while(iterator.hasNext()) {
        unsentBuilder.addPendingOEmbed(iterator.next());
    }
    final MessagePlus messagePlus = unsentBuilder.build();
    mDatabase.insertOrReplaceMessage(messagePlus);

    //problem to solve - display locations need
    // if(mConfiguration.isLocationLookupEnabled) {
    // ArrayList<MessagePlus> mp = new ArrayList<MessagePlus>(1);
    // mp.add(messagePlus);
    // lookupLocation(mp, true);
    // }

    LinkedHashMap<String, MessagePlus> channelUnsentMessages = getUnsentMessages(channelId);
    channelUnsentMessages.put(newMessageIdString, messagePlus);

    // Rebuild the channel map with the new message first so it appears as the
    // newest entry in iteration order.
    LinkedHashMap<String, MessagePlus> newChannelMessages = new LinkedHashMap<String, MessagePlus>(channelMessages.size() + 1);
    newChannelMessages.put(messagePlus.getMessage().getId(), messagePlus);
    newChannelMessages.putAll(channelMessages);
    mMessages.put(channelId, newChannelMessages);

    minMaxPair.maxId = newMessageIdString;

    Log.d(TAG, "Created and stored unsent message with id " + newMessageIdString);

    sendUnsentMessages(channelId, new MessageManagerSendUnsentMessagesHandler() {
        @Override
        public void onSuccess(final List<String> sentMessageIds) {
            retrieveNewestMessages(channelId, new MessageManagerResponseHandler() {
                @Override
                public void onSuccess(List<MessagePlus> responseData, boolean appended) {
                    if(handler != null) {
                        handler.setSentMessageIds(sentMessageIds);
                        handler.onSuccess(responseData, appended);
                    }
                }

                @Override
                public void onError(Exception exception) {
                    Log.d(TAG, exception.getMessage(), exception);
                    if(handler != null) {
                        handler.setSentMessageIds(sentMessageIds);
                        handler.onError(exception);
                    }
                }
            });
        }

        @Override
        public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus) {
            Log.d(TAG, "began upload of pending file " + pendingFileId + " for message with id " + messagePlus.getMessage().getId());
        }

        @Override
        public void onError(Exception exception, List<String> sentMessageIds) {
            Log.d(TAG, exception.getMessage(), exception);
            if(handler != null) {
                handler.setSentMessageIds(sentMessageIds);
                handler.onError(exception);
            }
        }
    });

    return messagePlus;
}

// Deletes a message. Unsent messages are removed locally (database +
// in-memory maps, with max-id bookkeeping updated); sent messages are
// deleted on the server first and then removed locally.
public synchronized void deleteMessage(final MessagePlus messagePlus, final MessageDeletionResponseHandler handler) {
    if(messagePlus.isUnsent()) {
        Message message = messagePlus.getMessage();
        String messageId = message.getId();
        String channelId = message.getChannelId();

        LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(channelId);

        mDatabase.deleteMessage(messagePlus);
        getUnsentMessages(channelId).remove(messageId);
        channelMessages.remove(messageId);

        MinMaxPair minMaxPair = getMinMaxPair(channelId);
        if(channelMessages.size() > 0) {
            // First key in the LinkedHashMap is the newest remaining message.
            minMaxPair.maxId = channelMessages.keySet().iterator().next();
        } else {
            minMaxPair.maxId = null;
        }
        handler.onSuccess();
    } else {
        mClient.deleteMessage(messagePlus.getMessage(), new MessageResponseHandler() {
            @Override
            public void onSuccess(Message responseData) {
                LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(responseData.getChannelId());
                channelMessages.remove(responseData.getId());
                mDatabase.deleteMessage(messagePlus); //this one because the deleted one doesn't have the entities.
                handler.onSuccess();
            }

            @Override
            public void onError(Exception error) {
                super.onError(error);
                handler.onError(error);
            }
        });
    }
}

// Re-fetches a single message from the server, updates the in-memory map
// (if loaded) and, when enabled, the database; hands the fresh MessagePlus
// to the handler.
public synchronized void refreshMessage(final Message message, final MessageRefreshResponseHandler handler) {
    final String channelId = message.getChannelId();
    mClient.retrieveMessage(channelId, message.getId(), mParameters.get(channelId), new MessageResponseHandler() {
        @Override
        public void onSuccess(Message responseData) {
            MessagePlus mPlus = new MessagePlus(responseData);
            mPlus.setDisplayDate(getAdjustedDate(responseData));
            LinkedHashMap<String, MessagePlus> channelMessages = mMessages.get(channelId);
            if(channelMessages != null) {
                //could be null if channel messages weren't loaded first, etc.
                channelMessages.put(responseData.getId(), mPlus);
            }
            if(mConfiguration.isDatabaseInsertionEnabled) {
                mDatabase.insertOrReplaceMessage(mPlus);
            }
            handler.onSuccess(mPlus);
        }

        @Override
        public void onError(Exception error) {
            super.onError(error);
            handler.onError(error);
        }
    });
}

/**
 * Sync and persist all Messages in a Channel.
 *
 * This is intended to be used as a one-time sync, e.g. after a user signs in. For this reason,
 * it is required that your MessageManagerConfiguration has its isDatabaseInsertionEnabled property
 * set to true.
 *
 * Because this could potentially result in a very large amount of Messages being obtained,
 * the provided MessageManagerResponseHandler will only be passed the first 100 Messages that are
 * obtained, while the others will be persisted to the sqlite database, but not kept in memory.
 * However, these can easily be loaded into memory afterwards by calling loadPersistedMessages().
 *
 * @param channelId The id of the Channel from which to obtain Messages.
 * @param responseHandler MessageManagerResponseHandler
 *
 * @see com.alwaysallthetime.adnlibutils.manager.MessageManager.MessageManagerConfiguration#setDatabaseInsertionEnabled(boolean)
 * @see MessageManager#loadPersistedMessages(String, int)
 */
public synchronized void retrieveAndPersistAllMessages(String channelId, MessageManagerSyncResponseHandler responseHandler) {
    if(!mConfiguration.isDatabaseInsertionEnabled) {
        throw new RuntimeException("Database insertion must be enabled to use this functionality.");
    }
    final ArrayList<MessagePlus> messages = new ArrayList<MessagePlus>(MAX_MESSAGES_RETURNED_ON_SYNC);
    String sinceId = null;
    String beforeId = null;
    retrieveAllMessages(messages, sinceId, beforeId, channelId, responseHandler);
}

// Recursive worker for retrieveAndPersistAllMessages(): pages backwards
// through the channel (MAX_MESSAGES_RETURNED_ON_SYNC at a time) until the
// server reports no more messages, accumulating only the first page in the
// "messages" list handed to the caller.
private synchronized void retrieveAllMessages(final ArrayList<MessagePlus> messages, String sinceId, String beforeId, final String channelId, final MessageManagerSyncResponseHandler responseHandler) {
    QueryParameters params = (QueryParameters) mParameters.get(channelId).clone();
    params.put("since_id", sinceId);
    params.put("before_id", beforeId);
    params.put("count", String.valueOf(MAX_MESSAGES_RETURNED_ON_SYNC));

    retrieveMessages(params, channelId, new ArrayList<String>(0), new MessageManagerResponseHandler() {
        @Override
        public void onSuccess(List<MessagePlus> responseData, boolean appended) {
            if(messages.size() == 0) {
                // Keep only the first (newest) page in memory for the caller.
                messages.addAll(responseData);
            }
            responseHandler.setNumMessagesSynced(responseHandler.getNumMessagesSynced() + responseData.size());
            if(isMore()) {
                MinMaxPair minMaxPair = getMinMaxPair(channelId);
                retrieveAllMessages(messages, null, minMaxPair.minId, channelId, responseHandler);
            } else {
                Log.d(TAG, "Num messages synced: " + responseHandler.getNumMessagesSynced());
                responseHandler.setSentMessageIds(getSentMessageIds());
                responseHandler.onSuccess(messages, true);
            }
        }

        @Override
        public void onError(Exception exception) {
            Log.e(TAG, exception.getMessage(), exception);
            responseHandler.onError(exception);
        }
    });
}

// Clones the channel's QueryParameters, applies the since/before window, and
// delegates to the QueryParameters-based retrieval.
private synchronized void retrieveMessages(final String channelId, final String sinceId, final String beforeId, final List<String> sentMessageIds, final MessageManagerResponseHandler handler) {
    QueryParameters params = (QueryParameters) mParameters.get(channelId).clone();
    params.put("since_id", sinceId);
    params.put("before_id", beforeId);
    retrieveMessages(params, channelId, sentMessageIds, handler);
}

// Sends the first unsent message in the map, then recurses until the map is
// empty. A message with pending OEmbeds triggers its file upload instead of
// a send (the send resumes via the file-upload broadcast receiver below).
// After each successful send the local copy is deleted and the channel's
// max-id bookkeeping is repaired.
private synchronized void sendUnsentMessages(final LinkedHashMap<String, MessagePlus> unsentMessages, final List<String> sentMessageIds, final MessageManagerSendUnsentMessagesHandler handler) {
    final MessagePlus messagePlus = unsentMessages.get(unsentMessages.keySet().iterator().next());
    if(messagePlus.hasPendingOEmbeds()) {
        String pendingFileId = messagePlus.getPendingOEmbeds().iterator().next();
        List<MessagePlus> messagesNeedingPendingFile = getMessagesNeedingPendingFile(pendingFileId);
        messagesNeedingPendingFile.add(messagePlus);
        //TODO: this should somehow be prepopulated?

        FileManager.getInstance(mContext, mClient).startPendingFileUpload(pendingFileId);
        if(handler != null) {
            handler.onFileUploadBegan(pendingFileId, messagePlus);
        }
        return;
    }
    final Message message = messagePlus.getMessage();

    //we had them set for display locally, but we should
    //let the server generate the "real" entities.
    message.setEntities(null);

    mClient.createMessage(message.getChannelId(), message, new MessageResponseHandler() {
        @Override
        public void onSuccess(Message responseData) {
            Log.d(TAG, "Successfully sent unsent message with id " + message.getId());

            unsentMessages.remove(message.getId());
            sentMessageIds.add(message.getId());
            mDatabase.deleteMessage(messagePlus);

            //remove the message from in-memory message map.
            LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(message.getChannelId());
            channelMessages.remove(message.getId());

            MinMaxPair minMaxPair = getMinMaxPair(message.getChannelId());
            if(unsentMessages.size() > 0) {
                String nextId = unsentMessages.keySet().iterator().next();
                minMaxPair.maxId = nextId;
                sendUnsentMessages(unsentMessages, sentMessageIds, handler);
            } else {
                if(channelMessages.size() > 0) {
                    //step back in time until we find the first message that was NOT one
                    //of the unsent messages. this will be the max id.
                    String nextMaxId = null; // NOTE(review): nextMaxId is never read; looks like dead code.
                    Iterator<String> channelMessagesIterator = channelMessages.keySet().iterator();
                    while(channelMessagesIterator.hasNext()) {
                        String next = channelMessagesIterator.next();
                        if(!sentMessageIds.contains(next)) {
                            minMaxPair.maxId = next;
                            break;
                        }
                    }
                } else {
                    minMaxPair.maxId = null;
                }
                if(handler != null) {
                    handler.onSuccess(sentMessageIds);
                }
            }
        }

        @Override
        public void onError(Exception exception) {
            super.onError(exception);
            if(handler != null) {
                handler.onError(exception, sentMessageIds);
            }
        }
    });
}

// Public entry point: flushes all unsent messages for a channel, first
// loading persisted messages if needed so that the channel's max id is
// known. Calls the handler with an empty id list when there is nothing to
// send.
public synchronized void sendUnsentMessages(final String channelId, MessageManagerSendUnsentMessagesHandler handler) {
    LinkedHashMap<String, MessagePlus> unsentMessages = getUnsentMessages(channelId);
    if(unsentMessages.size() > 0) {
        LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(channelId);
        if(channelMessages.size() == 0) {
            //we do this so that the max id is known.
            loadPersistedMessages(channelId, unsentMessages.size() + 1);
        }
        List<String> sentMessageIds = new ArrayList<String>(unsentMessages.size());
        sendUnsentMessages(unsentMessages, sentMessageIds, handler);
    } else {
        if(handler != null) {
            handler.onSuccess(new ArrayList<String>(0));
        }
    }
}

// Core retrieval: fetches messages with the given QueryParameters, updates
// min/max id bookkeeping according to which of since_id/before_id was set,
// merges results into the in-memory map (prepended or appended to preserve
// newest-first order), and kicks off optional location/OEmbed lookup.
// "appended" is true when older messages were fetched (before_id only).
private synchronized void retrieveMessages(final QueryParameters queryParameters, final String channelId, final List<String> sentMessageIds, final MessageManagerResponseHandler handler) {
    mClient.retrieveMessagesInChannel(channelId, queryParameters, new MessageListResponseHandler() {
        @Override
        public void onSuccess(final MessageList responseData) {
            boolean appended = true;
            String beforeId = queryParameters.get("before_id");
            String sinceId = queryParameters.get("since_id");

            MinMaxPair minMaxPair = getMinMaxPair(channelId);
            if(beforeId != null && sinceId == null) {
                // Paged backwards only: extend the known minimum.
                // (getMinId()/getMaxId() presumably come from MessageListResponseHandler — declared outside this chunk.)
                String newMinId = getMinId();
                if(newMinId != null) {
                    minMaxPair.minId = newMinId;
                }
            } else if(beforeId == null && sinceId != null) {
                // Fetched newer messages only: results are prepended.
                appended = false;
                String newMaxId = getMaxId();
                if(newMaxId != null) {
                    minMaxPair.maxId = newMaxId;
                }
            } else if(beforeId == null && sinceId == null) {
                // Fresh fetch: both bounds come from the response.
                minMaxPair.minId = getMinId();
                minMaxPair.maxId = getMaxId();
            }

            LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(channelId);
            ArrayList<MessagePlus> newestMessages = new ArrayList<MessagePlus>(responseData.size());
            LinkedHashMap<String, MessagePlus> newFullChannelMessagesMap = new LinkedHashMap<String, MessagePlus>(channelMessages.size() + responseData.size());
            if(appended) {
                // Older messages go after the existing ones.
                newFullChannelMessagesMap.putAll(channelMessages);
            }
            for(Message m : responseData) {
                MessagePlus mPlus = new MessagePlus(m);
                newestMessages.add(mPlus);
                adjustDateAndInsert(mPlus);
                newFullChannelMessagesMap.put(m.getId(), mPlus);
            }
            if(!appended) {
                // Newer messages go before the existing ones.
                newFullChannelMessagesMap.putAll(channelMessages);
            }
            mMessages.put(channelId, newFullChannelMessagesMap);

            if(mConfiguration.isLocationLookupEnabled) {
                lookupLocation(newestMessages, true);
            }
            if(mConfiguration.isOEmbedLookupEnabled) {
                lookupOEmbed(newestMessages, true);
            }

            if(handler != null) {
                handler.setSentMessageIds(sentMessageIds);
                handler.setIsMore(isMore());
                handler.onSuccess(newestMessages, appended);
            }
        }

        @Override
        public void onError(Exception error) {
            Log.d(TAG, error.getMessage(), error);

            if(handler != null) {
                handler.setSentMessageIds(sentMessageIds);
                handler.onError(error);
            }
        }
    });
}

// Applies the configured display-date adapter to the message and, when
// enabled, persists the message and its hashtag instances.
private void adjustDateAndInsert(MessagePlus mPlus) {
    Date adjustedDate = getAdjustedDate(mPlus.getMessage());
    mPlus.setDisplayDate(adjustedDate);
    if(mConfiguration.isDatabaseInsertionEnabled) {
        mDatabase.insertOrReplaceMessage(mPlus);
        mDatabase.insertOrReplaceHashtagInstances(mPlus);
    }
}

// Returns the message's display date: the adapter-provided date when a
// dateAdapter is configured, otherwise the server creation date.
private Date getAdjustedDate(Message message) {
    return mConfiguration.dateAdapter == null ? message.getCreatedAt() : mConfiguration.dateAdapter.getDisplayDate(message);
}

// Picks a locality/sub-locality pair from the geocoder results and wraps
// them with the coordinates in a Geolocation. Returns null if no address
// supplies both values.
private Geolocation getGeoLocation(List<Address> addresses, double latitude, double longitude) {
    String locality = null;
    String subLocality = null;

    for(Address address : addresses) {
        if(subLocality == null) {
            subLocality = address.getSubLocality();
        }
        // Re-read locality whenever we just found a subLocality, or have no
        // locality yet.
        if(subLocality != null || locality == null) {
            locality = address.getLocality();
        }
        if(subLocality != null && locality != null) {
            break;
        }
    }
    if(subLocality != null && locality != null) {
        return new Geolocation(locality, subLocality, latitude, longitude);
    }
    return null;
}

// Receives FileUploadService completion broadcasts. On success, swaps the
// pending OEmbed on each waiting message for a real OEmbed annotation built
// from the uploaded File, persists the change, and retries sending for any
// channel whose messages no longer have pending files.
private final BroadcastReceiver fileUploadReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        if(FileUploadService.INTENT_ACTION_FILE_UPLOAD_COMPLETE.equals(intent.getAction())) {
            String pendingFileId = intent.getStringExtra(FileUploadService.EXTRA_PENDING_FILE_ID);
            if(pendingFileId != null) {
                boolean success = intent.getBooleanExtra(FileUploadService.EXTRA_SUCCESS, false);
                if(success) {
                    Log.d(TAG, "Successfully uploaded pending file with id " + pendingFileId);

                    List<MessagePlus> messagesNeedingFile = mPendingFiles.get(pendingFileId);
                    if(messagesNeedingFile != null) {
                        HashSet<String> channelIdsWithMessagesToSend = new HashSet<String>();
                        String fileJson = intent.getStringExtra(FileUploadService.EXTRA_FILE);
                        File file = AppDotNetGson.getPersistenceInstance().fromJson(fileJson, File.class);
                        for(MessagePlus messagePlus : messagesNeedingFile) {
                            Message message = messagePlus.getMessage();
                            messagePlus.replacePendingOEmbedWithOEmbedAnnotation(pendingFileId, file);
                            mDatabase.insertOrReplaceMessage(messagePlus);
                            mDatabase.deletePendingOEmbed(pendingFileId, message.getId(), message.getChannelId());

                            // Only retry channels whose message has no other
                            // pending uploads outstanding.
                            if(messagePlus.getPendingOEmbeds().size() == 0) {
                                channelIdsWithMessagesToSend.add(message.getChannelId());
                            }
                        }

                        for(String channelId : channelIdsWithMessagesToSend) {
                            sendUnsentMessages(channelId, null);
                            Log.d(TAG, "Now retrying send for unsent messages in channel " + channelId);
                        }
                    }
                } else {
                    //TODO
                    Log.e(TAG, "Failed to upload pending file with id " + pendingFileId);
                }
            }
        }
    }
};

// Configuration for a MessageManager: toggles for database persistence,
// OEmbed lookup, and location lookup, plus optional date-display and
// location-lookup callbacks.
public static class MessageManagerConfiguration {

    // Callback for asynchronous location lookup results on a MessagePlus.
    public static interface MessageLocationLookupHandler {
        public void onSuccess(MessagePlus messagePlus);
        public void onException(MessagePlus messagePlus, Exception exception);
    }

    boolean isDatabaseInsertionEnabled;
    boolean isOEmbedLookupEnabled;
    boolean isLocationLookupEnabled;
    MessageDisplayDateAdapter dateAdapter;
    MessageLocationLookupHandler locationLookupHandler;

    /**
     * Enable or disable automatic insertion of Messages into a sqlite database
     * upon retrieval. By default, this feature is turned off.
     *
     * @param isEnabled true if all retrieved Messages should be stashed in a sqlite
     * database, false otherwise.
     */
    public void setDatabaseInsertionEnabled(boolean isEnabled) {
        this.isDatabaseInsertionEnabled = isEnabled;
    }

    /**
     * Set a MessageDisplayDateAdapter.
     *
     * @param adapter
     * @see com.alwaysallthetime.adnlibutils.manager.MessageManager.MessageDisplayDateAdapter
     */
    public void setMessageDisplayDateAdapter(MessageDisplayDateAdapter adapter) {
        this.dateAdapter = adapter;
    }

    /**
     * Enable OEmbed lookup on Messages. If enabled, annotations will be examined in order to
     * determine if OEmbed photo or video annotations are present. The associated MessagePlus
     * will then have these OEmbed Objects obtainable via convenience methods.
     *
     * This is especially useful when database insertion is enabled – instances of photo and
     * video OEmbeds will be stored in a table for look up at a later time (e.g. "gimme all
     * messages for which there are photos attached").
     *
     * @param isEnabled
     */
    public void setOEmbedLookupEnabled(boolean isEnabled) {
        this.isOEmbedLookupEnabled = isEnabled;
    }

    /**
     * Enable location lookup on Messages. If enabled, annotations will be examined in order
     * to construct a DisplayLocation. A DisplayLocation will be set on the associated MessagePlus
     * Object, based off one of these three annotations, if they exist:
     *
     * net.app.core.checkin
     * net.app.ohai.location
     * net.app.core.geolocation
     *
     * In the case of net.app.core.geolocation, an asynchronous task will be fired off to
     * perform reverse geolocation on the latitude/longitude coordinates. For this reason, you
     * should set a MessageLocationLookupHandler on this configuration if you want to perform
     * a task such as update UI after a location is obtained.
     *
     * If none of these annotations are found, then a null DisplayLocation is set on the
     * associated MessagePlus.
     *
     * @param isEnabled true if location lookup should be performed on all Messages
     *
     * @see com.alwaysallthetime.adnlibutils.model.MessagePlus#getDisplayLocation()
     * @see com.alwaysallthetime.adnlibutils.model.MessagePlus#hasSetDisplayLocation()
     * @see com.alwaysallthetime.adnlibutils.model.MessagePlus#hasDisplayLocation()
     */
    public void setLocationLookupEnabled(boolean isEnabled) {
        this.isLocationLookupEnabled = isEnabled;
    }

    /**
     * Specify a handler to be notified when location lookup has completed for a MessagePlus.
     * This is particularly useful when a geolocation annotation requires an asynchronous
     * reverse geocoding task.
     *
     * @param handler
     */
    public void setLocationLookupHandler(MessageLocationLookupHandler handler) {
        this.locationLookupHandler = handler;
    }
}
}
broadcast now used to report unsent messages were sent
src/main/java/com/alwaysallthetime/adnlibutils/manager/MessageManager.java
broadcast now used to report unsent messages were sent
<ide><path>rc/main/java/com/alwaysallthetime/adnlibutils/manager/MessageManager.java <ide> import java.util.HashSet; <ide> import java.util.Iterator; <ide> import java.util.LinkedHashMap; <del>import java.util.LinkedHashSet; <ide> import java.util.List; <ide> import java.util.Map; <ide> import java.util.Set; <ide> private static final String TAG = "ADNLibUtils_MessageManager"; <ide> <ide> private static final int MAX_MESSAGES_RETURNED_ON_SYNC = 100; <add> <add> public static final String INTENT_ACTION_UNSENT_MESSAGES_SENT = "com.alwaysallthetime.adnlibutils.manager.MessageManager.intent.unsentMessagesSent"; <add> public static final String EXTRA_CHANNEL_ID = "com.alwaysallthetime.adnlibutils.manager.MessageManager.extras.channelId"; <add> public static final String EXTRA_SENT_MESSAGE_IDS = "com.alwaysallthetime.adnlibutils.manager.MessageManager.extras.sentMessageIds"; <ide> <ide> /* <ide> * public data structures <ide> public boolean isMore() { <ide> return this.isMore; <ide> } <del> <del> void setSentMessageIds(List<String> sentMessageIds) { <del> this.sentMessageIds = sentMessageIds; <del> } <del> <del> public List<String> getSentMessageIds() { <del> return this.sentMessageIds; <del> } <ide> } <ide> <ide> public static abstract class MessageManagerSyncResponseHandler extends MessageManagerResponseHandler { <ide> public int getNumMessagesSynced() { <ide> return numMessagesSynced; <ide> } <del> } <del> <del> public interface MessageManagerSendUnsentMessagesHandler { <del> public void onSuccess(List<String> sentMessageIds); <del> public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus); <del> public void onError(Exception exception, List<String> sentMessageIds); <ide> } <ide> <ide> public interface MessageRefreshResponseHandler { <ide> } <ide> } <ide> <del> public synchronized void retrieveMessages(final String channelId, final MessageManagerResponseHandler handler) { <del> sendUnsentMessages(channelId, new 
MessageManagerSendUnsentMessagesHandler() { <del> @Override <del> public void onSuccess(final List<String> sentMessageIds) { <del> MinMaxPair minMaxPair = getMinMaxPair(channelId); <del> retrieveMessages(channelId, minMaxPair.maxId, minMaxPair.minId, sentMessageIds, handler); <del> } <del> <del> @Override <del> public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus) { <del> Log.d(TAG, "began upload of pending file " + pendingFileId + " for message with id " + messagePlus.getMessage().getId()); <del> } <del> <del> @Override <del> public void onError(Exception exception, List<String> sentMessageIds) { <del> //TODO: handle the sent messages in this case? <del> Log.e(TAG, exception.getMessage(), exception); <del> handler.onError(exception); <del> } <del> }); <del> } <del> <del> public synchronized void retrieveNewestMessages(final String channelId, final MessageManagerResponseHandler handler) { <del> sendUnsentMessages(channelId, new MessageManagerSendUnsentMessagesHandler() { <del> @Override <del> public void onSuccess(final List<String> sentMessageIds) { <del> retrieveMessages(channelId, getMinMaxPair(channelId).maxId, null, sentMessageIds, handler); <del> } <del> <del> @Override <del> public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus) { <del> Log.d(TAG, "began upload of pending file " + pendingFileId + " for message with id " + messagePlus.getMessage().getId()); <del> } <del> <del> @Override <del> public void onError(Exception exception, List<String> sentMessageIds) { <del> //TODO: handle the sent messages in this case? 
<del> Log.e(TAG, exception.getMessage(), exception); <del> handler.onError(exception); <del> } <del> }); <del> } <del> <del> public synchronized void retrieveMoreMessages(final String channelId, final MessageManagerResponseHandler handler) { <del> sendUnsentMessages(channelId, new MessageManagerSendUnsentMessagesHandler() { <del> @Override <del> public void onSuccess(final List<String> sentMessageIds) { <del> retrieveMessages(channelId, null, getMinMaxPair(channelId).minId, sentMessageIds, handler); <del> } <del> <del> @Override <del> public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus) { <del> Log.d(TAG, "began upload of pending file " + pendingFileId + " for message with id " + messagePlus.getMessage().getId()); <del> } <del> <del> @Override <del> public void onError(Exception exception, List<String> sentMessageIds) { <del> //TODO: handle the sent messages in this case? <del> Log.e(TAG, exception.getMessage(), exception); <del> handler.onError(exception); <del> } <del> }); <add> public synchronized boolean retrieveMessages(final String channelId, final MessageManagerResponseHandler handler) { <add> MinMaxPair minMaxPair = getMinMaxPair(channelId); <add> return retrieveMessages(channelId, minMaxPair.maxId, minMaxPair.minId, handler); <add> } <add> <add> public synchronized boolean retrieveNewestMessages(final String channelId, final MessageManagerResponseHandler handler) { <add> return retrieveMessages(channelId, getMinMaxPair(channelId).maxId, null, handler); <add> } <add> <add> public synchronized boolean retrieveMoreMessages(final String channelId, final MessageManagerResponseHandler handler) { <add> return retrieveMessages(channelId, null, getMinMaxPair(channelId).minId, handler); <ide> } <ide> <ide> public synchronized void createMessage(final String channelId, final Message message, final MessageManagerResponseHandler handler) { <ide> } <ide> <ide> public synchronized MessagePlus createUnsentMessageAndAttemptSend(final String 
channelId, Message message) { <del> return createUnsentMessageAndAttemptSend(channelId, message, new HashSet<String>(0), null); <del> } <del> <del> public synchronized MessagePlus createUnsentMessageAndAttemptSend(final String channelId, Message message, final MessageManagerResponseHandler handler) { <del> return createUnsentMessageAndAttemptSend(channelId, message, new HashSet<String>(0), handler); <del> } <del> <del> public synchronized MessagePlus createUnsentMessageAndAttemptSend(final String channelId, Message message, Set<String> pendingFileIds, final MessageManagerResponseHandler handler) { <add> return createUnsentMessageAndAttemptSend(channelId, message, new HashSet<String>(0)); <add> } <add> <add> public synchronized MessagePlus createUnsentMessageAndAttemptSend(final String channelId, Message message, Set<String> pendingFileIds) { <ide> if(!mConfiguration.isDatabaseInsertionEnabled) { <ide> throw new RuntimeException("Database insertion must be enabled in order to use the unsent messages feature"); <ide> } <ide> <ide> Log.d(TAG, "Created and stored unsent message with id " + newMessageIdString); <ide> <del> sendUnsentMessages(channelId, new MessageManagerSendUnsentMessagesHandler() { <del> @Override <del> public void onSuccess(final List<String> sentMessageIds) { <del> retrieveNewestMessages(channelId, new MessageManagerResponseHandler() { <del> @Override <del> public void onSuccess(List<MessagePlus> responseData, boolean appended) { <del> if(handler != null) { <del> handler.setSentMessageIds(sentMessageIds); <del> handler.onSuccess(responseData, appended); <del> } <del> } <del> <del> @Override <del> public void onError(Exception exception) { <del> Log.d(TAG, exception.getMessage(), exception); <del> if(handler != null) { <del> handler.setSentMessageIds(sentMessageIds); <del> handler.onError(exception); <del> } <del> } <del> }); <del> } <del> <del> @Override <del> public void onFileUploadBegan(String pendingFileId, MessagePlus messagePlus) { <del> 
Log.d(TAG, "began upload of pending file " + pendingFileId + " for message with id " + messagePlus.getMessage().getId()); <del> } <del> <del> @Override <del> public void onError(Exception exception, List<String> sentMessageIds) { <del> Log.d(TAG, exception.getMessage(), exception); <del> if(handler != null) { <del> handler.setSentMessageIds(sentMessageIds); <del> handler.onError(exception); <del> } <del> } <del> }); <add> sendUnsentMessages(channelId); <ide> <ide> return messagePlus; <ide> } <ide> params.put("before_id", beforeId); <ide> params.put("count", String.valueOf(MAX_MESSAGES_RETURNED_ON_SYNC)); <ide> <del> retrieveMessages(params, channelId, new ArrayList<String>(0), new MessageManagerResponseHandler() { <add> retrieveMessages(params, channelId, new MessageManagerResponseHandler() { <ide> @Override <ide> public void onSuccess(List<MessagePlus> responseData, boolean appended) { <ide> if(messages.size() == 0) { <ide> retrieveAllMessages(messages, null, minMaxPair.minId, channelId, responseHandler); <ide> } else { <ide> Log.d(TAG, "Num messages synced: " + responseHandler.getNumMessagesSynced()); <del> responseHandler.setSentMessageIds(getSentMessageIds()); <ide> responseHandler.onSuccess(messages, true); <ide> } <ide> } <ide> }); <ide> } <ide> <del> private synchronized void retrieveMessages(final String channelId, final String sinceId, final String beforeId, final List<String> sentMessageIds, final MessageManagerResponseHandler handler) { <add> private synchronized boolean retrieveMessages(final String channelId, final String sinceId, final String beforeId, final MessageManagerResponseHandler handler) { <ide> QueryParameters params = (QueryParameters) mParameters.get(channelId).clone(); <ide> params.put("since_id", sinceId); <ide> params.put("before_id", beforeId); <del> retrieveMessages(params, channelId, sentMessageIds, handler); <del> } <del> <del> private synchronized void sendUnsentMessages(final LinkedHashMap<String, MessagePlus> unsentMessages, 
final List<String> sentMessageIds, final MessageManagerSendUnsentMessagesHandler handler) { <add> return retrieveMessages(params, channelId, handler); <add> } <add> <add> private synchronized void sendUnsentMessages(final LinkedHashMap<String, MessagePlus> unsentMessages, final ArrayList<String> sentMessageIds) { <ide> final MessagePlus messagePlus = unsentMessages.get(unsentMessages.keySet().iterator().next()); <ide> if(messagePlus.hasPendingOEmbeds()) { <ide> String pendingFileId = messagePlus.getPendingOEmbeds().iterator().next(); <ide> //TODO: this should somehow be prepopulated? <ide> <ide> FileManager.getInstance(mContext, mClient).startPendingFileUpload(pendingFileId); <del> if(handler != null) { <del> handler.onFileUploadBegan(pendingFileId, messagePlus); <del> } <ide> return; <ide> } <ide> final Message message = messagePlus.getMessage(); <ide> if(unsentMessages.size() > 0) { <ide> String nextId = unsentMessages.keySet().iterator().next(); <ide> minMaxPair.maxId = nextId; <del> sendUnsentMessages(unsentMessages, sentMessageIds, handler); <add> sendUnsentMessages(unsentMessages, sentMessageIds); <ide> } else { <ide> if(channelMessages.size() > 0) { <ide> //step back in time until we find the first message that was NOT one <ide> } else { <ide> minMaxPair.maxId = null; <ide> } <del> if(handler != null) { <del> handler.onSuccess(sentMessageIds); <del> } <add> <add> Intent i = new Intent(INTENT_ACTION_UNSENT_MESSAGES_SENT); <add> i.putExtra(EXTRA_CHANNEL_ID, message.getChannelId()); <add> i.putStringArrayListExtra(EXTRA_SENT_MESSAGE_IDS, sentMessageIds); <add> mContext.sendBroadcast(i); <ide> } <ide> } <ide> <ide> @Override <ide> public void onError(Exception exception) { <ide> super.onError(exception); <del> if(handler != null) { <del> handler.onError(exception, sentMessageIds); <del> } <ide> } <ide> }); <ide> } <ide> <del> public synchronized void sendUnsentMessages(final String channelId, MessageManagerSendUnsentMessagesHandler handler) { <add> public 
synchronized void sendUnsentMessages(final String channelId) { <ide> LinkedHashMap<String, MessagePlus> unsentMessages = getUnsentMessages(channelId); <ide> if(unsentMessages.size() > 0) { <ide> LinkedHashMap<String, MessagePlus> channelMessages = getChannelMessages(channelId); <ide> //we do this so that the max id is known. <ide> loadPersistedMessages(channelId, unsentMessages.size() + 1); <ide> } <del> List<String> sentMessageIds = new ArrayList<String>(unsentMessages.size()); <del> sendUnsentMessages(unsentMessages, sentMessageIds, handler); <del> } else { <del> if(handler != null) { <del> handler.onSuccess(new ArrayList<String>(0)); <del> } <del> } <del> } <del> <del> private synchronized void retrieveMessages(final QueryParameters queryParameters, final String channelId, final List<String> sentMessageIds, final MessageManagerResponseHandler handler) { <add> ArrayList<String> sentMessageIds = new ArrayList<String>(unsentMessages.size()); <add> sendUnsentMessages(unsentMessages, sentMessageIds); <add> } <add> } <add> <add> private synchronized boolean retrieveMessages(final QueryParameters queryParameters, final String channelId, final MessageManagerResponseHandler handler) { <add> LinkedHashMap<String, MessagePlus> unsentMessages = getUnsentMessages(channelId); <add> if(unsentMessages.size() > 0) { <add> return false; <add> } <ide> mClient.retrieveMessagesInChannel(channelId, queryParameters, new MessageListResponseHandler() { <ide> @Override <ide> public void onSuccess(final MessageList responseData) { <ide> } <ide> <ide> if(handler != null) { <del> handler.setSentMessageIds(sentMessageIds); <ide> handler.setIsMore(isMore()); <ide> handler.onSuccess(newestMessages, appended); <ide> } <ide> Log.d(TAG, error.getMessage(), error); <ide> <ide> if(handler != null) { <del> handler.setSentMessageIds(sentMessageIds); <ide> handler.onError(error); <ide> } <ide> } <ide> }); <add> return true; <ide> } <ide> <ide> private void adjustDateAndInsert(MessagePlus mPlus) { 
<ide> } <ide> <ide> for(String channelId : channelIdsWithMessagesToSend) { <del> sendUnsentMessages(channelId, null); <add> sendUnsentMessages(channelId); <ide> Log.d(TAG, "Now retrying send for unsent messages in channel " + channelId); <ide> } <ide> }
Java
apache-2.0
9a6376cedc9afd8b4e0cb967302b59992669594a
0
Sbrenthughes/Hygieia,Sbrenthughes/Hygieia,jimzucker/Hygieia,Sbrenthughes/Hygieia,jimzucker/Hygieia,nireeshT/Hygieia,capitalone/Hygieia,Sbrenthughes/Hygieia,jimzucker/Hygieia,nireeshT/Hygieia,capitalone/Hygieia,nireeshT/Hygieia,capitalone/Hygieia,capitalone/Hygieia,nireeshT/Hygieia,jimzucker/Hygieia
package com.capitalone.dashboard.evaluator; import com.capitalone.dashboard.ApiSettings; import com.capitalone.dashboard.model.AuditException; import com.capitalone.dashboard.model.CollectorItem; import com.capitalone.dashboard.model.CollectorType; import com.capitalone.dashboard.model.Dashboard; import com.capitalone.dashboard.model.DashboardType; import com.capitalone.dashboard.model.TestResult; import com.capitalone.dashboard.model.TestSuiteType; import com.capitalone.dashboard.model.TestCapability; import com.capitalone.dashboard.model.TestSuite; import com.capitalone.dashboard.model.Traceability; import com.capitalone.dashboard.model.Widget; import com.capitalone.dashboard.model.Feature; import com.capitalone.dashboard.model.StoryIndicator; import com.capitalone.dashboard.repository.FeatureRepository; import com.capitalone.dashboard.repository.TestResultRepository; import com.capitalone.dashboard.response.TestResultsAuditResponse; import com.capitalone.dashboard.status.TestResultAuditStatus; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.math.NumberUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.ArrayList; import java.util.Date; import java.util.Comparator; import java.util.Optional; import java.util.Map; import java.util.regex.Pattern; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.stream.Collectors; @Component public class RegressionTestResultEvaluator extends Evaluator<TestResultsAuditResponse> { private final TestResultRepository testResultRepository; private final FeatureRepository featureRepository; private static final Logger LOGGER = LoggerFactory.getLogger(RegressionTestResultEvaluator.class); private long beginDate; private long endDate; private Dashboard 
dashboard; private static final String WIDGET_CODE_ANALYSIS = "codeanalysis"; private static final String WIDGET_FEATURE = "feature"; private static final String STR_TEAM_ID = "teamId"; private static final String STR_UNDERSCORE = "_"; private static final String STR_HYPHEN = "-"; private static final String STR_AT = "@"; private static final String STR_EMPTY = ""; private static final String SUCCESS_COUNT = "successCount"; private static final String FAILURE_COUNT = "failureCount"; private static final String SKIP_COUNT = "skippedCount"; private static final String TOTAL_COUNT = "totalCount"; private static final String TEST_CASE_SUCCESS_COUNT = "successTestCaseCount"; private static final String TEST_CASE_FAILURE_COUNT = "failureTestCaseCount"; private static final String TEST_CASE_SKIPPED_COUNT = "skippedTestCaseCount"; private static final String TEST_CASE_TOTAL_COUNT = "totalTestCaseCount"; private static final String PRIORITY_HIGH = "High"; @Autowired public RegressionTestResultEvaluator(TestResultRepository testResultRepository, FeatureRepository featureRepository) { this.testResultRepository = testResultRepository; this.featureRepository = featureRepository; } @Override public Collection<TestResultsAuditResponse> evaluate(Dashboard dashboard, long beginDate, long endDate, Map<?, ?> dummy) throws AuditException { this.beginDate = beginDate-1; this.endDate = endDate+1; this.dashboard = getDashboard(dashboard.getTitle(), DashboardType.Team); List<CollectorItem> testItems = getCollectorItems(this.dashboard, WIDGET_CODE_ANALYSIS, CollectorType.Test); Collection<TestResultsAuditResponse> testResultsAuditResponse = new ArrayList<>(); if (CollectionUtils.isEmpty(testItems)) { throw new AuditException("No tests configured", AuditException.NO_COLLECTOR_ITEM_CONFIGURED); } testItems.forEach(testItem -> testResultsAuditResponse.add(getRegressionTestResultAudit(dashboard, testItem))); return testResultsAuditResponse; } @Override public TestResultsAuditResponse 
evaluate(CollectorItem collectorItem, long beginDate, long endDate, Map<?, ?> data) { return new TestResultsAuditResponse(); } /** * Gets the json response from test_results collection with story information based on tags. * @param testItem * @return */ protected TestResultsAuditResponse getRegressionTestResultAudit(Dashboard dashboard, CollectorItem testItem) { List<TestResult> testResults = testResultRepository.findByCollectorItemIdAndTimestampIsBetweenOrderByTimestampDesc(testItem.getId(), beginDate, endDate); return performTestResultAudit(dashboard, testItem, testResults); } /** * Perform test result audit * * @param testItem * @param testResults * @return testResultsAuditResponse */ private TestResultsAuditResponse performTestResultAudit(Dashboard dashboard, CollectorItem testItem, List<TestResult> testResults) { TestResultsAuditResponse testResultsAuditResponse = new TestResultsAuditResponse(); testResultsAuditResponse.setAuditEntity(testItem.getOptions()); testResultsAuditResponse.setLastUpdated(testItem.getLastUpdated()); if (CollectionUtils.isEmpty(testResults) || !isValidTestResultTestSuitType(testResults)){ testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_MISSING); return testResultsAuditResponse; } TestResult testResult = testResults.stream().sorted(Comparator.comparing(TestResult::getTimestamp).reversed()).findFirst().get(); testResultsAuditResponse.setLastExecutionTime(testResult.getStartTime()); testResultsAuditResponse.setType(testResult.getType().toString()); testResultsAuditResponse.setFeatureTestResult(getFeatureTestResult(testResult)); testResultsAuditResponse = updateTraceabilityDetails(dashboard, testResult, testResultsAuditResponse); List<TestCapability> testCapabilities = testResult.getTestCapabilities().stream().collect(Collectors.toList()); testResultsAuditResponse = updateTestResultAuditStatuses(testCapabilities, testResultsAuditResponse); // Clearing for readability in response for(TestCapability test: 
testCapabilities){ test.setTestSuites(null); } testResultsAuditResponse.setTestCapabilities(testCapabilities); return testResultsAuditResponse; } /*** * Update traceability details with calculated percent value * @param testResult,testResultsAuditResponse * @return testResultsAuditResponse */ private TestResultsAuditResponse updateTraceabilityDetails(Dashboard dashboard, TestResult testResult, TestResultsAuditResponse testResultsAuditResponse) { Traceability traceability = new Traceability(); List<String> totalStoriesList = new ArrayList<>(); List<String> totalCompletedStories = new ArrayList<>(); List<HashMap> totalStories = new ArrayList<>(); double traceabilityThreshold = settings.getTraceabilityThreshold(); Widget featureWidget = getFeatureWidget(dashboard); Optional<Object> teamIdOpt = Optional.ofNullable(featureWidget.getOptions().get(STR_TEAM_ID)); String teamId = teamIdOpt.isPresent() ? teamIdOpt.get().toString() : ""; List<Feature> featureList = featureRepository.getStoryByTeamID(teamId); featureList.stream().forEach(feature -> { HashMap<String, String> storyAuditStatusMap = new HashMap<>(); totalStoriesList.add(feature.getsNumber()); if(isValidChangeDate(feature)) { if(this.isValidStoryStatus(feature.getsStatus())){ totalCompletedStories.add(feature.getsNumber()); storyAuditStatusMap.put(feature.getsNumber(), TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_STORY_MATCH.name()); } else{ storyAuditStatusMap.put(feature.getsNumber(), TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_STORY_STATUS_INVALID.name()); } } else { storyAuditStatusMap.put(feature.getsNumber(), TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_STORY_NOT_FOUND.name()); } totalStories.add(storyAuditStatusMap); }); if (totalCompletedStories.size() > NumberUtils.INTEGER_ZERO) { int totalStoryIndicatorCount = getTotalStoryIndicators(testResult).size(); double percentage = (totalStoryIndicatorCount * 100) / totalCompletedStories.size(); traceability.setPercentage(percentage); if 
(traceabilityThreshold == NumberUtils.DOUBLE_ZERO) { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_THRESHOLD_DEFAULT); } if(percentage == NumberUtils.DOUBLE_ZERO){ testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_NOT_FOUND); } } else { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_NOT_FOUND_IN_GIVEN_DATE_RANGE); } traceability.setTotalCompletedStories(totalCompletedStories); traceability.setTotalStories(totalStories); traceability.setTotalStoryCount(totalStories.size()); traceability.setThreshold(traceabilityThreshold); testResultsAuditResponse.setTraceability(traceability); return testResultsAuditResponse; } /** * Get story indicators by matching test case tags with feature stories * @param testResult * @return */ private List<StoryIndicator> getTotalStoryIndicators(TestResult testResult) { Pattern featureIdPattern = Pattern.compile(settings.getFeatureIDPattern()); List<StoryIndicator> totalStoryIndicatorList = new ArrayList<>(); testResult.getTestCapabilities().stream() .map(TestCapability::getTestSuites).flatMap(Collection::stream) .map(TestSuite::getTestCases).flatMap(Collection::stream) .forEach(testCase -> { List<StoryIndicator> storyIndicatorList = new ArrayList<>(); testCase.getTags().forEach(tag -> { if (featureIdPattern.matcher(getValidFeatureId(tag)).find()) { List<Feature> features = featureRepository.getStoryByNumber(tag); features.forEach(feature -> { if (isValidChangeDate(feature) && isValidStoryStatus(feature.getsStatus())) { StoryIndicator storyIndicator = new StoryIndicator(); storyIndicator.setStoryId(feature.getsId()); storyIndicator.setStoryType(feature.getsTypeName()); storyIndicator.setStoryNumber(feature.getsNumber()); storyIndicator.setStoryName(feature.getsName()); storyIndicator.setEpicNumber(feature.getsEpicNumber()); storyIndicator.setEpicName(feature.getsEpicName()); 
storyIndicator.setProjectName(feature.getsProjectName()); storyIndicator.setTeamName(feature.getsTeamName()); storyIndicator.setSprintName(feature.getsSprintName()); storyIndicator.setStoryStatus(feature.getsStatus()); storyIndicator.setStoryState(feature.getsState()); storyIndicatorList.add(storyIndicator); } }); } }); storyIndicatorList.forEach(storyIndicator -> { if (!totalStoryIndicatorList.contains(storyIndicator)) { totalStoryIndicatorList.add(storyIndicator); } }); testCase.setStoryIndicators(storyIndicatorList); }); return totalStoryIndicatorList; } private CharSequence getValidFeatureId(String tag) { tag = tag.replaceAll(STR_UNDERSCORE, STR_HYPHEN).replaceAll(STR_AT, STR_EMPTY); return tag; } /** * Coverts the Human readable time date to Epoch Time Stamp in Milliseconds * @param feature * @return */ private long getEpochChangeDate(Feature feature) { String datePattern = "yyyy-MM-dd'T'HH:mm:ss.SSS"; long changeDate = 0; try { SimpleDateFormat sdf = new SimpleDateFormat(datePattern); Date dt = sdf.parse(feature.getChangeDate()); changeDate = dt.getTime(); } catch(ParseException e) { e.printStackTrace(); LOGGER.error("Error in RegressionTestResultEvaluator.getEpochChangeDate() - Unable to match date pattern - " + e.getMessage()); } return changeDate; } /** * Check whether the story status is valid * @param storyStatus * @return */ private boolean isValidStoryStatus(String storyStatus) { final List<String> validStatus = settings.getValidStoryStatus(); return validStatus.contains(storyStatus.toUpperCase()); } /** * Check whether the feature date is valid * @param feature * @return */ private boolean isValidChangeDate(Feature feature){ return (this.getEpochChangeDate(feature) >= beginDate && this.getEpochChangeDate(feature) <= endDate); } /** * Get dashboard by title and type * @param title * @param dashboardType * @return */ private Dashboard getDashboard(String title, DashboardType dashboardType) { return dashboardRepository.findByTitleAndType(title, 
dashboardType); } /** * Check whether the test result test suit type is valid * @param testResults * @return */ public boolean isValidTestResultTestSuitType(List<TestResult> testResults) { return testResults.stream() .anyMatch(testResult -> testResult.getType().equals(TestSuiteType.Functional) || testResult.getType().equals(TestSuiteType.Manual) || testResult.getType().equals(TestSuiteType.Regression)); } /** * Get feature widget * @return */ public Widget getFeatureWidget(Dashboard dashboard) { return dashboard.getWidgets() .stream() .filter(widget -> widget.getName().equalsIgnoreCase(WIDGET_FEATURE)) .findFirst().orElse(new Widget()); } /** * Builds feature test result data map * @param testResult * @return featureTestResultMap */ protected HashMap getFeatureTestResult(TestResult testResult) { HashMap<String,Integer> featureTestResultMap = new HashMap<>(); featureTestResultMap.put(SUCCESS_COUNT, testResult.getSuccessCount()); featureTestResultMap.put(FAILURE_COUNT, testResult.getFailureCount()); featureTestResultMap.put(SKIP_COUNT, testResult.getSkippedCount()); featureTestResultMap.put(TOTAL_COUNT,testResult.getTotalCount()); Collection<TestCapability> testCapabilities = testResult.getTestCapabilities(); int totalTestCaseCount = testCapabilities.stream().mapToInt(testCapability -> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getTotalTestCaseCount).sum()).sum(); int testCaseSuccessCount = testCapabilities.stream().mapToInt(testCapability -> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getSuccessTestCaseCount).sum()).sum(); int testCaseFailureCount = testCapabilities.stream().mapToInt(testCapability -> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getFailedTestCaseCount).sum()).sum(); int testCaseSkippedCount = testCapabilities.stream().mapToInt(testCapability -> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getSkippedTestCaseCount).sum()).sum(); 
featureTestResultMap.put(TEST_CASE_TOTAL_COUNT, totalTestCaseCount); featureTestResultMap.put(TEST_CASE_SUCCESS_COUNT, testCaseSuccessCount); featureTestResultMap.put(TEST_CASE_FAILURE_COUNT, testCaseFailureCount); featureTestResultMap.put(TEST_CASE_SKIPPED_COUNT, testCaseSkippedCount); return featureTestResultMap; } /** * update test result audit statuses * @param testCapabilities * @param testResultsAuditResponse * @return */ private TestResultsAuditResponse updateTestResultAuditStatuses(List<TestCapability> testCapabilities, TestResultsAuditResponse testResultsAuditResponse) { boolean isSuccessHighPriority = settings.getTestResultSuccessPriority().equalsIgnoreCase(PRIORITY_HIGH); boolean isFailureHighPriority = settings.getTestResultFailurePriority().equalsIgnoreCase(PRIORITY_HIGH); if(isAllTestCasesSkipped(testCapabilities)){ testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_SKIPPED); return testResultsAuditResponse; } double testCasePassPercent = this.getTestCasePassPercent(testCapabilities); if (isFailureHighPriority){ if (testCasePassPercent < settings.getTestResultThreshold()) { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_AUDIT_FAIL); } else { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_AUDIT_OK); } }else if (isSuccessHighPriority){ if (testCasePassPercent > NumberUtils.INTEGER_ZERO) { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_AUDIT_OK); } else { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_AUDIT_FAIL); } }else { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_MISSING); } return testResultsAuditResponse; } /** * Get test result pass percent * @param testCapabilities * @return */ private double getTestCasePassPercent(List<TestCapability> testCapabilities) { double testCaseSuccessCount = testCapabilities.stream().mapToDouble(testCapability -> 
testCapability.getTestSuites().parallelStream().mapToDouble(TestSuite::getSuccessTestCaseCount).sum() ).sum(); double totalTestCaseCount = testCapabilities.stream().mapToDouble(testCapability -> testCapability.getTestSuites().parallelStream().mapToDouble(TestSuite::getTotalTestCaseCount).sum() ).sum(); return (testCaseSuccessCount/totalTestCaseCount) * 100; } public void setSettings(ApiSettings settings) { this.settings = settings; } /** * Check if all the test cases are skipped * @param testCapabilities * @return */ public boolean isAllTestCasesSkipped(List<TestCapability> testCapabilities) { int totalTestCaseCount = testCapabilities.stream().mapToInt(testCapability -> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getTotalTestCaseCount).sum() ).sum(); int testCaseSkippedCount = testCapabilities.stream().mapToInt(testCapability -> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getSkippedTestCaseCount).sum() ).sum(); boolean isSkippedHighPriority = settings.getTestResultSkippedPriority().equalsIgnoreCase(PRIORITY_HIGH); if ((testCaseSkippedCount >= totalTestCaseCount) && isSkippedHighPriority){ return true; } return false; } }
api-audit/src/main/java/com/capitalone/dashboard/evaluator/RegressionTestResultEvaluator.java
package com.capitalone.dashboard.evaluator; import com.capitalone.dashboard.ApiSettings; import com.capitalone.dashboard.model.AuditException; import com.capitalone.dashboard.model.CollectorItem; import com.capitalone.dashboard.model.CollectorType; import com.capitalone.dashboard.model.Dashboard; import com.capitalone.dashboard.model.DashboardType; import com.capitalone.dashboard.model.TestResult; import com.capitalone.dashboard.model.TestSuiteType; import com.capitalone.dashboard.model.TestCapability; import com.capitalone.dashboard.model.TestSuite; import com.capitalone.dashboard.model.Traceability; import com.capitalone.dashboard.model.Widget; import com.capitalone.dashboard.model.Feature; import com.capitalone.dashboard.model.StoryIndicator; import com.capitalone.dashboard.repository.FeatureRepository; import com.capitalone.dashboard.repository.TestResultRepository; import com.capitalone.dashboard.response.TestResultsAuditResponse; import com.capitalone.dashboard.status.TestResultAuditStatus; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.math.NumberUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.ArrayList; import java.util.Date; import java.util.Comparator; import java.util.Optional; import java.util.Map; import java.util.regex.Pattern; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.stream.Collectors; @Component public class RegressionTestResultEvaluator extends Evaluator<TestResultsAuditResponse> { private final TestResultRepository testResultRepository; private final FeatureRepository featureRepository; private static final Logger LOGGER = LoggerFactory.getLogger(RegressionTestResultEvaluator.class); private long beginDate; private long endDate; private Dashboard 
dashboard; private static final String WIDGET_CODE_ANALYSIS = "codeanalysis"; private static final String WIDGET_FEATURE = "feature"; private static final String STR_TEAM_ID = "teamId"; private static final String STR_UNDERSCORE = "_"; private static final String STR_HYPHEN = "-"; private static final String STR_AT = "@"; private static final String STR_EMPTY = ""; private static final String SUCCESS_COUNT = "successCount"; private static final String FAILURE_COUNT = "failureCount"; private static final String SKIP_COUNT = "skippedCount"; private static final String TOTAL_COUNT = "totalCount"; private static final String PRIORITY_HIGH = "High"; @Autowired public RegressionTestResultEvaluator(TestResultRepository testResultRepository, FeatureRepository featureRepository) { this.testResultRepository = testResultRepository; this.featureRepository = featureRepository; } @Override public Collection<TestResultsAuditResponse> evaluate(Dashboard dashboard, long beginDate, long endDate, Map<?, ?> dummy) throws AuditException { this.beginDate = beginDate-1; this.endDate = endDate+1; this.dashboard = getDashboard(dashboard.getTitle(), DashboardType.Team); List<CollectorItem> testItems = getCollectorItems(this.dashboard, WIDGET_CODE_ANALYSIS, CollectorType.Test); Collection<TestResultsAuditResponse> testResultsAuditResponse = new ArrayList<>(); if (CollectionUtils.isEmpty(testItems)) { throw new AuditException("No tests configured", AuditException.NO_COLLECTOR_ITEM_CONFIGURED); } testItems.forEach(testItem -> testResultsAuditResponse.add(getRegressionTestResultAudit(dashboard, testItem))); return testResultsAuditResponse; } @Override public TestResultsAuditResponse evaluate(CollectorItem collectorItem, long beginDate, long endDate, Map<?, ?> data) { return new TestResultsAuditResponse(); } /** * Gets the json response from test_results collection with story information based on tags. 
* @param testItem * @return */ protected TestResultsAuditResponse getRegressionTestResultAudit(Dashboard dashboard, CollectorItem testItem) { List<TestResult> testResults = testResultRepository.findByCollectorItemIdAndTimestampIsBetweenOrderByTimestampDesc(testItem.getId(), beginDate, endDate); return performTestResultAudit(dashboard, testItem, testResults); } /** * Perform test result audit * * @param testItem * @param testResults * @return testResultsAuditResponse */ private TestResultsAuditResponse performTestResultAudit(Dashboard dashboard, CollectorItem testItem, List<TestResult> testResults) { TestResultsAuditResponse testResultsAuditResponse = new TestResultsAuditResponse(); testResultsAuditResponse.setAuditEntity(testItem.getOptions()); testResultsAuditResponse.setLastUpdated(testItem.getLastUpdated()); if (CollectionUtils.isEmpty(testResults) || !isValidTestResultTestSuitType(testResults)){ testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_MISSING); return testResultsAuditResponse; } TestResult testResult = testResults.stream().sorted(Comparator.comparing(TestResult::getTimestamp).reversed()).findFirst().get(); testResultsAuditResponse.setLastExecutionTime(testResult.getStartTime()); testResultsAuditResponse.setType(testResult.getType().toString()); testResultsAuditResponse.setFeatureTestResult(getFeatureTestResult(testResult)); testResultsAuditResponse = updateTraceabilityDetails(dashboard, testResult, testResultsAuditResponse); List<TestCapability> testCapabilities = testResult.getTestCapabilities().stream().collect(Collectors.toList()); testResultsAuditResponse = updateTestResultAuditStatuses(testCapabilities, testResultsAuditResponse); // Clearing for readability in response for(TestCapability test: testCapabilities){ test.setTestSuites(null); } testResultsAuditResponse.setTestCapabilities(testCapabilities); return testResultsAuditResponse; } /*** * Update traceability details with calculated percent value * @param 
testResult,testResultsAuditResponse * @return testResultsAuditResponse */ private TestResultsAuditResponse updateTraceabilityDetails(Dashboard dashboard, TestResult testResult, TestResultsAuditResponse testResultsAuditResponse) { Traceability traceability = new Traceability(); List<String> totalStoriesList = new ArrayList<>(); List<String> totalCompletedStories = new ArrayList<>(); List<HashMap> totalStories = new ArrayList<>(); double traceabilityThreshold = settings.getTraceabilityThreshold(); Widget featureWidget = getFeatureWidget(dashboard); Optional<Object> teamIdOpt = Optional.ofNullable(featureWidget.getOptions().get(STR_TEAM_ID)); String teamId = teamIdOpt.isPresent() ? teamIdOpt.get().toString() : ""; List<Feature> featureList = featureRepository.getStoryByTeamID(teamId); featureList.stream().forEach(feature -> { HashMap<String, String> storyAuditStatusMap = new HashMap<>(); totalStoriesList.add(feature.getsNumber()); if(isValidChangeDate(feature)) { if(this.isValidStoryStatus(feature.getsStatus())){ totalCompletedStories.add(feature.getsNumber()); storyAuditStatusMap.put(feature.getsNumber(), TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_STORY_MATCH.name()); } else{ storyAuditStatusMap.put(feature.getsNumber(), TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_STORY_STATUS_INVALID.name()); } } else { storyAuditStatusMap.put(feature.getsNumber(), TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_STORY_NOT_FOUND.name()); } totalStories.add(storyAuditStatusMap); }); if (totalCompletedStories.size() > NumberUtils.INTEGER_ZERO) { int totalStoryIndicatorCount = getTotalStoryIndicators(testResult).size(); double percentage = (totalStoryIndicatorCount * 100) / totalCompletedStories.size(); traceability.setPercentage(percentage); if (traceabilityThreshold == NumberUtils.DOUBLE_ZERO) { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_THRESHOLD_DEFAULT); } if(percentage == NumberUtils.DOUBLE_ZERO){ 
testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_NOT_FOUND); } } else { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULTS_TRACEABILITY_NOT_FOUND_IN_GIVEN_DATE_RANGE); } traceability.setTotalCompletedStories(totalCompletedStories); traceability.setTotalStories(totalStories); traceability.setTotalStoryCount(totalStories.size()); traceability.setThreshold(traceabilityThreshold); testResultsAuditResponse.setTraceability(traceability); return testResultsAuditResponse; } /** * Get story indicators by matching test case tags with feature stories * @param testResult * @return */ private List<StoryIndicator> getTotalStoryIndicators(TestResult testResult) { Pattern featureIdPattern = Pattern.compile(settings.getFeatureIDPattern()); List<StoryIndicator> totalStoryIndicatorList = new ArrayList<>(); testResult.getTestCapabilities().stream() .map(TestCapability::getTestSuites).flatMap(Collection::stream) .map(TestSuite::getTestCases).flatMap(Collection::stream) .forEach(testCase -> { List<StoryIndicator> storyIndicatorList = new ArrayList<>(); testCase.getTags().forEach(tag -> { if (featureIdPattern.matcher(getValidFeatureId(tag)).find()) { List<Feature> features = featureRepository.getStoryByNumber(tag); features.forEach(feature -> { if (isValidChangeDate(feature) && isValidStoryStatus(feature.getsStatus())) { StoryIndicator storyIndicator = new StoryIndicator(); storyIndicator.setStoryId(feature.getsId()); storyIndicator.setStoryType(feature.getsTypeName()); storyIndicator.setStoryNumber(feature.getsNumber()); storyIndicator.setStoryName(feature.getsName()); storyIndicator.setEpicNumber(feature.getsEpicNumber()); storyIndicator.setEpicName(feature.getsEpicName()); storyIndicator.setProjectName(feature.getsProjectName()); storyIndicator.setTeamName(feature.getsTeamName()); storyIndicator.setSprintName(feature.getsSprintName()); storyIndicator.setStoryStatus(feature.getsStatus()); 
storyIndicator.setStoryState(feature.getsState()); storyIndicatorList.add(storyIndicator); } }); } }); storyIndicatorList.forEach(storyIndicator -> { if (!totalStoryIndicatorList.contains(storyIndicator)) { totalStoryIndicatorList.add(storyIndicator); } }); testCase.setStoryIndicators(storyIndicatorList); }); return totalStoryIndicatorList; } private CharSequence getValidFeatureId(String tag) { tag = tag.replaceAll(STR_UNDERSCORE, STR_HYPHEN).replaceAll(STR_AT, STR_EMPTY); return tag; } /** * Coverts the Human readable time date to Epoch Time Stamp in Milliseconds * @param feature * @return */ private long getEpochChangeDate(Feature feature) { String datePattern = "yyyy-MM-dd'T'HH:mm:ss.SSS"; long changeDate = 0; try { SimpleDateFormat sdf = new SimpleDateFormat(datePattern); Date dt = sdf.parse(feature.getChangeDate()); changeDate = dt.getTime(); } catch(ParseException e) { e.printStackTrace(); LOGGER.error("Error in RegressionTestResultEvaluator.getEpochChangeDate() - Unable to match date pattern - " + e.getMessage()); } return changeDate; } /** * Check whether the story status is valid * @param storyStatus * @return */ private boolean isValidStoryStatus(String storyStatus) { final List<String> validStatus = settings.getValidStoryStatus(); return validStatus.contains(storyStatus.toUpperCase()); } /** * Check whether the feature date is valid * @param feature * @return */ private boolean isValidChangeDate(Feature feature){ return (this.getEpochChangeDate(feature) >= beginDate && this.getEpochChangeDate(feature) <= endDate); } /** * Get dashboard by title and type * @param title * @param dashboardType * @return */ private Dashboard getDashboard(String title, DashboardType dashboardType) { return dashboardRepository.findByTitleAndType(title, dashboardType); } /** * Check whether the test result test suit type is valid * @param testResults * @return */ public boolean isValidTestResultTestSuitType(List<TestResult> testResults) { return testResults.stream() 
.anyMatch(testResult -> testResult.getType().equals(TestSuiteType.Functional) || testResult.getType().equals(TestSuiteType.Manual) || testResult.getType().equals(TestSuiteType.Regression)); } /** * Get feature widget * @return */ public Widget getFeatureWidget(Dashboard dashboard) { return dashboard.getWidgets() .stream() .filter(widget -> widget.getName().equalsIgnoreCase(WIDGET_FEATURE)) .findFirst().orElse(new Widget()); } /** * Builds feature test result data map * @param testResult * @return featureTestResultMap */ protected HashMap getFeatureTestResult(TestResult testResult) { HashMap<String,Integer> featureTestResultMap = new HashMap<>(); featureTestResultMap.put(SUCCESS_COUNT, testResult.getSuccessCount()); featureTestResultMap.put(FAILURE_COUNT, testResult.getFailureCount()); featureTestResultMap.put(SKIP_COUNT, testResult.getSkippedCount()); featureTestResultMap.put(TOTAL_COUNT,testResult.getTotalCount()); return featureTestResultMap; } /** * update test result audit statuses * @param testCapabilities * @param testResultsAuditResponse * @return */ private TestResultsAuditResponse updateTestResultAuditStatuses(List<TestCapability> testCapabilities, TestResultsAuditResponse testResultsAuditResponse) { boolean isSuccessHighPriority = settings.getTestResultSuccessPriority().equalsIgnoreCase(PRIORITY_HIGH); boolean isFailureHighPriority = settings.getTestResultFailurePriority().equalsIgnoreCase(PRIORITY_HIGH); if(isAllTestCasesSkipped(testCapabilities)){ testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_SKIPPED); return testResultsAuditResponse; } double testCasePassPercent = this.getTestCasePassPercent(testCapabilities); if (isFailureHighPriority){ if (testCasePassPercent < settings.getTestResultThreshold()) { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_AUDIT_FAIL); } else { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_AUDIT_OK); } }else if (isSuccessHighPriority){ if 
(testCasePassPercent > NumberUtils.INTEGER_ZERO) { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_AUDIT_OK); } else { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_AUDIT_FAIL); } }else { testResultsAuditResponse.addAuditStatus(TestResultAuditStatus.TEST_RESULT_MISSING); } return testResultsAuditResponse; } /** * Get test result pass percent * @param testCapabilities * @return */ private double getTestCasePassPercent(List<TestCapability> testCapabilities) { double testCaseSuccessCount = testCapabilities.stream().mapToDouble(testCapability -> testCapability.getTestSuites().parallelStream().mapToDouble(TestSuite::getSuccessTestCaseCount).sum() ).sum(); double totalTestCaseCount = testCapabilities.stream().mapToDouble(testCapability -> testCapability.getTestSuites().parallelStream().mapToDouble(TestSuite::getTotalTestCaseCount).sum() ).sum(); return (testCaseSuccessCount/totalTestCaseCount) * 100; } public void setSettings(ApiSettings settings) { this.settings = settings; } /** * Check if all the test cases are skipped * @param testCapabilities * @return */ public boolean isAllTestCasesSkipped(List<TestCapability> testCapabilities) { int totalTestCaseCount = testCapabilities.stream().mapToInt(testCapability -> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getTotalTestCaseCount).sum() ).sum(); int testCaseSkippedCount = testCapabilities.stream().mapToInt(testCapability -> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getSkippedTestCaseCount).sum() ).sum(); boolean isSkippedHighPriority = settings.getTestResultSkippedPriority().equalsIgnoreCase(PRIORITY_HIGH); if ((testCaseSkippedCount >= totalTestCaseCount) && isSkippedHighPriority){ return true; } return false; } }
test case counts added in featureTestResult (#2895)
api-audit/src/main/java/com/capitalone/dashboard/evaluator/RegressionTestResultEvaluator.java
test case counts added in featureTestResult (#2895)
<ide><path>pi-audit/src/main/java/com/capitalone/dashboard/evaluator/RegressionTestResultEvaluator.java <ide> private static final String FAILURE_COUNT = "failureCount"; <ide> private static final String SKIP_COUNT = "skippedCount"; <ide> private static final String TOTAL_COUNT = "totalCount"; <add> private static final String TEST_CASE_SUCCESS_COUNT = "successTestCaseCount"; <add> private static final String TEST_CASE_FAILURE_COUNT = "failureTestCaseCount"; <add> private static final String TEST_CASE_SKIPPED_COUNT = "skippedTestCaseCount"; <add> private static final String TEST_CASE_TOTAL_COUNT = "totalTestCaseCount"; <ide> private static final String PRIORITY_HIGH = "High"; <ide> <ide> @Autowired <ide> storyIndicatorList.add(storyIndicator); <ide> } <ide> }); <del> } <add> } <ide> }); <ide> storyIndicatorList.forEach(storyIndicator -> { <ide> if (!totalStoryIndicatorList.contains(storyIndicator)) { <ide> featureTestResultMap.put(FAILURE_COUNT, testResult.getFailureCount()); <ide> featureTestResultMap.put(SKIP_COUNT, testResult.getSkippedCount()); <ide> featureTestResultMap.put(TOTAL_COUNT,testResult.getTotalCount()); <add> <add> Collection<TestCapability> testCapabilities = testResult.getTestCapabilities(); <add> int totalTestCaseCount = testCapabilities.stream().mapToInt(testCapability -> <add> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getTotalTestCaseCount).sum()).sum(); <add> int testCaseSuccessCount = testCapabilities.stream().mapToInt(testCapability -> <add> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getSuccessTestCaseCount).sum()).sum(); <add> int testCaseFailureCount = testCapabilities.stream().mapToInt(testCapability -> <add> testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getFailedTestCaseCount).sum()).sum(); <add> int testCaseSkippedCount = testCapabilities.stream().mapToInt(testCapability -> <add> 
testCapability.getTestSuites().parallelStream().mapToInt(TestSuite::getSkippedTestCaseCount).sum()).sum(); <add> <add> featureTestResultMap.put(TEST_CASE_TOTAL_COUNT, totalTestCaseCount); <add> featureTestResultMap.put(TEST_CASE_SUCCESS_COUNT, testCaseSuccessCount); <add> featureTestResultMap.put(TEST_CASE_FAILURE_COUNT, testCaseFailureCount); <add> featureTestResultMap.put(TEST_CASE_SKIPPED_COUNT, testCaseSkippedCount); <add> <ide> return featureTestResultMap; <ide> } <ide>
Java
mit
435d1d1dce9feec1defb48dcf2f20ac1ccea0ce5
0
remyd/slickgraph
package fr.caladan.slickgraph; import java.awt.Toolkit; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.IntStream; import javafx.beans.InvalidationListener; import javafx.beans.property.DoubleProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleDoubleProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.collections.FXCollections; import javafx.collections.ListChangeListener; import javafx.collections.ObservableList; import javafx.event.EventHandler; import javafx.scene.Group; import javafx.scene.canvas.Canvas; import javafx.scene.canvas.GraphicsContext; import javafx.scene.input.InputEvent; import javafx.scene.input.MouseEvent; import javafx.scene.input.ScrollEvent; import javafx.scene.paint.Color; import javafx.stage.Screen; /** * Slick Graph is a binning and smoothing technique for time series visualization. * Slick Graphs mitigate quantization artifacts by using the smallest possible binning intervals, i.e. pixels. * They nonetheless provide smooth variations by using a convolution with a kernel. * The filtered-out information that would be lost by this smoothing step is encoded using the luminance channel. 
*/ public class SlickGraph extends Group { /** Canvas used to render the timeseries */ protected Canvas canvas; public DoubleProperty widthProperty() { return canvas.widthProperty(); } public DoubleProperty heightProperty() { return canvas.heightProperty(); } /** Timeseries to render */ protected ObservableList<Timeseries> timeseries; public List<Timeseries> getTimeseries() { return timeseries; } public void setTimeseries(List<Timeseries> timeseries) { start = timeseries.get(0).getData().get(0); end = timeseries.get(0).getData().get(timeseries.get(0).getData().size() - 1); this.timeseries.clear(); this.timeseries.addAll(timeseries); } /** Bandwidth to use when computing the kernel estimation */ protected SimpleDoubleProperty kernelBandWidthProperty; public SimpleDoubleProperty kernelBandwidthProperty() { return kernelBandWidthProperty; } public void setKernelBandWidth(double kernelBandWidth) { kernelBandWidthProperty.set(Math.max(1., kernelBandWidth)); } public double getKernelBandWidth() { return kernelBandWidthProperty.get(); } /** Start timestamp */ protected double start; /** End timestamp */ protected double end; /** Histogram computed after aggregation based on the pixels */ protected Map<Timeseries, List<Double>> mapHistograms; /** Histogram values after the convolution */ protected Map<Timeseries, List<Double>> mapSmoothedHistogram; /** Vertices of the histogram */ protected List<Vertex> histogramVertices; /** Vertices of the graph */ protected Map<Timeseries, List<Vertex>> mapVertices; /** Horizontal scale factor */ protected SimpleDoubleProperty xScaleProperty; /** Vertical scale factor */ protected SimpleDoubleProperty yScaleProperty; /** Width (in physical pixels) of the canvas */ protected double scaledWidth; /** Height (in physical pixels) of the canvas */ protected double scaledHeight; /** Time cursor */ protected TimeCursor timeCursor; /** Time cursor visibility property */ protected SimpleBooleanProperty timeCursorVisibleProperty; public 
SimpleBooleanProperty timeCursorVisibleProperty() { return timeCursorVisibleProperty; } public boolean isTimeCursorVisible() { return timeCursorVisibleProperty.get(); } public void setTimeCursorVisible(boolean visible) { timeCursorVisibleProperty.set(visible); } /** Indicates whether hide or show the shading representing the difference between the smoothed and the real value */ protected SimpleBooleanProperty showShadingProperty; public SimpleBooleanProperty showShadingProperty() { return showShadingProperty; } public boolean isShadingShown() { return showShadingProperty.get(); } public void setShowShading(boolean showShading) { showShadingProperty.set(showShading); } /** List of alpha values for the SlickGraph shading */ protected List<Double> slgAlphas; /** Color of the timeseries outine */ protected SimpleObjectProperty<Color> curveColorProperty; public SimpleObjectProperty<Color> curveColorProperty() { return curveColorProperty; } public Color getCurveColor() { return curveColorProperty.get(); } public void setCurveColor(Color curveColor) { curveColorProperty.set(curveColor); } /** Indicates whether hide or show the curve of the graph */ protected SimpleBooleanProperty showCurveProperty; public SimpleBooleanProperty showCurveProperty() { return showCurveProperty; } public boolean isCurveShown() { return showCurveProperty.get(); } public void setShowCurve(boolean showCurve) { showCurveProperty.set(showCurve); } /** Public default constructor - initializes the properties */ public SlickGraph() { super(); canvas = new Canvas(); getChildren().add(canvas); timeseries = FXCollections.observableArrayList(); kernelBandWidthProperty = new SimpleDoubleProperty(5.0); mapHistograms = new HashMap<Timeseries, List<Double>>(); mapSmoothedHistogram = new HashMap<Timeseries, List<Double>>(); start = -1; end = -1; histogramVertices = new ArrayList<Vertex>(); mapVertices = new HashMap<Timeseries, List<Vertex>>(); xScaleProperty = new SimpleDoubleProperty(1.); yScaleProperty = new 
SimpleDoubleProperty(1.); timeCursor = new TimeCursor(); getChildren().add(timeCursor); timeCursorVisibleProperty = new SimpleBooleanProperty(true); showShadingProperty = new SimpleBooleanProperty(true); slgAlphas = new ArrayList<Double>(); showCurveProperty = new SimpleBooleanProperty(true); curveColorProperty = new SimpleObjectProperty<Color>(Color.BLACK); // render the graph when a timeseries is added or removed timeseries.addListener((ListChangeListener.Change<? extends Timeseries> c) -> { computeVertices(); render(); }); canvas.widthProperty().addListener(e -> handleHiDPI()); canvas.heightProperty().addListener(e -> handleHiDPI()); timeCursor.getCursorLine().endYProperty().bind(canvas.heightProperty()); kernelBandWidthProperty.addListener(e -> { computeVertices(); render(); }); // mouse event for the time cursor EventHandler<? super InputEvent> mouseEventHandler = e -> { double x = e instanceof MouseEvent ? ((MouseEvent) e).getX() : ((ScrollEvent) e).getX(); int histogramSize = mapHistograms.get(timeseries.get(0)).size(); if (x < histogramSize) { double value = mapSmoothedHistogram.values().stream() .mapToDouble(h -> h.get((int) (x * xScaleProperty.get()) + (int) Math.floor(3. * kernelBandWidthProperty.get())) * (end - start) / scaledWidth) .sum(); timeCursor.setTooltipText(" y = " + value + " "); } // TODO can throw a NullPointerException int toTrim = (int) (Math.round(3. * kernelBandWidthProperty.get() / 2.) * 2); timeCursor.setPosition(x, mapVertices.get(timeseries.get(timeseries.size() - 1)).get((int) Math.round(x * 2. 
* xScaleProperty.get()) / 2 * 2 + toTrim).y / yScaleProperty.get()); }; addEventHandler(MouseEvent.MOUSE_MOVED, mouseEventHandler); addEventHandler(MouseEvent.MOUSE_DRAGGED, mouseEventHandler); addEventHandler(ScrollEvent.ANY, mouseEventHandler); timeCursor.visibleProperty().bind(timeCursorVisibleProperty); // bind the properties setting the visualization parameters InvalidationListener propertiesListener = e -> render(); showShadingProperty.addListener(propertiesListener); showCurveProperty.addListener(propertiesListener); curveColorProperty.addListener(propertiesListener); } /** * Constructor that initializes the size of the graph * * @param width Width of the graph * @param height Height of the graph */ public SlickGraph(double width, double height) { this(); canvas.setWidth(width); canvas.setHeight(height); } /** * Constructor that initializes the data to visualize * * @param data Data that represent the time serie to render * @throws Exception If the data is not valid (timestamps should be strictly increasing) */ public SlickGraph(List<Double> data) throws Exception { this(); List<Timeseries> timeseries = new ArrayList<Timeseries>(); timeseries.add(new Timeseries(data)); setTimeseries(timeseries); } /** * Constructor that initializes the size of the graph and the data to visualize * * @param width Width of the graph * @param height Height of the graph * @param data Data that represent the time serie to render * @throws Exception If the data is not valid (timestamps should be strictly increasing) */ public SlickGraph(double width, double height, List<Double> data) throws Exception { this(data); canvas.setWidth(width); canvas.setHeight(height); } /** Set the scale on the canvas to have a 1:1 pixel mapping */ protected void handleHiDPI() { double nativeWidth = Toolkit.getDefaultToolkit().getScreenSize().getWidth(); double nativeHeight = Toolkit.getDefaultToolkit().getScreenSize().getHeight(); double screenWidth = Screen.getPrimary().getVisualBounds().getWidth(); 
double screenHeight = Screen.getPrimary().getVisualBounds().getHeight(); double xScale = nativeWidth / screenWidth; double yScale = nativeHeight / screenHeight; scaledWidth = canvas.getWidth() * xScale; scaledHeight = canvas.getHeight() * yScale; // back to scale 1:1 canvas.getGraphicsContext2D().scale(xScaleProperty.get(), yScaleProperty.get()); // set the new scale canvas.getGraphicsContext2D().scale(1. / xScale, 1. / yScale); xScaleProperty.set(xScale); yScaleProperty.set(yScale); // update the view computeVertices(); render(); } /** * Return the list of the array of events corresponding to the bounds of the pixels in a given time window * * @param start Start timestamp of the time window * @param end End timestamp of the time window * @return */ protected double[] buildPixelBounds(double start, double end) { int toTrim = (int) Math.floor(6. * kernelBandWidthProperty.get()); double timeSliceDuration = (end - start) / (scaledWidth + toTrim); double[] pixelBounds = new double[(int) (scaledWidth + 1 + toTrim)]; for (int i = 0; i < pixelBounds.length; i++) { pixelBounds[i] = start + i * timeSliceDuration; } return pixelBounds; } /** * Compute the aggregation of a timeseries based on the pixels * * @param timeseries Timeseries to aggregate */ protected void buildHistogram(Timeseries timeseries) { List<Double> histogram = new ArrayList<Double>(); // build the timestamps at the pixels bounds double[] pixelBounds = buildPixelBounds(start, end); // build the list of indices that correspond to the pixel bounds List<Integer> listIndices = new ArrayList<Integer>(); for (int i = 0; i < pixelBounds.length; i++) { int boundEvent = Collections.binarySearch(timeseries.getData(), pixelBounds[i]); boundEvent = boundEvent >= 0 ? 
boundEvent : -boundEvent - 1; listIndices.add(boundEvent); } for (int i = 0; i < listIndices.size() - 1; i++) { histogram.add((listIndices.get(i + 1) - listIndices.get(i)) / (end - start) * scaledWidth); } mapHistograms.put(timeseries, histogram); } /** * Compute the Gaussian values * * @return Gaussian values */ protected List<Double> gaussianKernel() { double kernelBandWidth = kernelBandWidthProperty.get(); double h = 2. * kernelBandWidth * kernelBandWidth; double v = 1. / (kernelBandWidth * Math.sqrt(2. * Math.PI)); int kernelSize = (int) (Math.ceil(kernelBandWidth * 3) * 2 + 1); List<Double> gaussianValues = new ArrayList<Double>(kernelSize); for (double i = 0; i < kernelSize; i++) { gaussianValues.add(Math.exp(-Math.pow(i - kernelSize / 2, 2) / h) * v); } return gaussianValues; } /** * Convolve the histogram with a statistic kernel for a given timeseries * * @param timeseries Timeseries whose histogram is to convolve */ protected void computeConvolution(Timeseries timeseries) { List<Double> histogram = mapHistograms.get(timeseries); List<Double> smoothedHistogram = new ArrayList<Double>(); // TODO can throw a NullPointerException histogram.forEach(i -> smoothedHistogram.add(0.)); // compute the convolution of the time serie width the kernel List<Double> gaussianValues = gaussianKernel(); for (int i = 2; i < histogram.size(); i++) { for (int k = 0; k < gaussianValues.size(); k++) { int j = (int) Math.ceil(i + k - gaussianValues.size() / 2.); if (j < 0 || j > histogram.size() - 1) { continue; } smoothedHistogram.set(i, smoothedHistogram.get(i) + histogram.get(j) * gaussianValues.get(k)); } } mapSmoothedHistogram.put(timeseries, smoothedHistogram); } /** Compute the vertices for the layered rendering */ protected void computeStackedVertices() { mapVertices.clear(); // TODO can throw a NullPointerException double max = IntStream.range(0, mapSmoothedHistogram.get(timeseries.get(0)).size()) .mapToDouble(i -> mapSmoothedHistogram.values().stream().mapToDouble(h -> 
h.get(i)).sum()) .summaryStatistics() .getMax(); // put the first timeseries at the bottom Timeseries ts = timeseries.get(0); List<Double> smoothedHistogram = mapSmoothedHistogram.get(ts); List<Vertex> vertices = new ArrayList<Vertex>(); for (int i = 0; i < smoothedHistogram.size(); i++) { vertices.add(new Vertex(i, (1. - smoothedHistogram.get(i) / max * .8) * scaledHeight, ts.getColor())); vertices.add(new Vertex(i, scaledHeight)); } mapVertices.put(ts, vertices); // stack the other timeseries for (int i = 1; i < timeseries.size(); i++) { ts = timeseries.get(i); smoothedHistogram = mapSmoothedHistogram.get(ts); vertices = new ArrayList<Vertex>(); List<Vertex> aboveVertices = mapVertices.get(timeseries.get(i - 1)); for (int j = 0; j < smoothedHistogram.size(); j++) { vertices.add(new Vertex(aboveVertices.get(2 * j).x, aboveVertices.get(2 * j).y - smoothedHistogram.get(j) / max * .8 * scaledHeight, ts.getColor())); vertices.add(new Vertex(aboveVertices.get(2 * j).x, aboveVertices.get(2 * j).y, ts.getColor())); } mapVertices.put(ts, vertices); } // trim 3 times the kernel bandwidth at each side int toTrim = (int) (Math.round(3. * kernelBandWidthProperty.get() / 2.) * 2); mapVertices.forEach((t, vt) -> { vt = vt.subList(toTrim, vt.size() - toTrim); vt.forEach(v -> v.x -= toTrim); mapVertices.put(t, vt); }); } /** Compute the alpha values used for the SlickGraph shading */ protected void computeSlgAlphas() { slgAlphas.clear(); int toTrim = (int) (Math.round(3. * kernelBandWidthProperty.get() / 2.) * 2) / 2; IntStream.range(0, mapHistograms.get(timeseries.get(0)).size()).forEach(i -> { double vh = mapHistograms.values().stream() .mapToDouble(h -> h.get(i)) .sum(); double vsh = mapSmoothedHistogram.values().stream() .mapToDouble(sh -> sh.get(i)) .sum(); slgAlphas.add(vh == 0 ? 0. : 1. / (1. 
+ vsh / vh)); }); slgAlphas = slgAlphas.subList(toTrim, slgAlphas.size() - toTrim); } /** Compute the vertices of the graph */ protected void computeVertices() { // nothing to do if not shown yet or not data if (canvas.getWidth() == 0. || canvas.getHeight() == 0. || timeseries.isEmpty()) { return; } // aggregate the timeseries timeseries.stream().forEach(ts -> { buildHistogram(ts); computeConvolution(ts); }); computeStackedVertices(); computeSlgAlphas(); } /** * Perform a zoom * * @param z Y-delta of the zoom */ public void zoom(double z) { double delta = 50. * (end - start) / scaledWidth; if (z > 0) { start += delta; end -= delta; } else { start -= delta; end += delta; } computeVertices(); render(); } /** * Perform a pan * * @param deltaX Horizontal displacement of the mouse cursor */ public void pan(double deltaX) { double delta = deltaX * (end - start) / scaledWidth; start += delta; end += delta; computeVertices(); render(); } /** Draw the graph */ protected void render() { // sanity check boolean verticesReady = true; int i = 0; while (i < timeseries.size() && verticesReady) { verticesReady = verticesReady && mapVertices.containsKey(timeseries.get(i)); i++; } if (timeseries == null || timeseries.isEmpty() || !verticesReady) { return; } GraphicsContext gc = canvas.getGraphicsContext2D(); // clear the canvas gc.setFill(Color.WHITE); gc.fillRect(0, 0, scaledWidth, scaledHeight); // render the shading if (showShadingProperty.get()) { // render the shading List<Vertex> vertices = mapVertices.get(timeseries.get(timeseries.size() - 1)); for (int v = 0; v < vertices.size() - 1; v += 2) { gc.setStroke(Color.rgb(0, 0, 0, slgAlphas.get(v / 2))); gc.strokeLine(vertices.get(v).x, vertices.get(v).y, vertices.get(v + 1).x, scaledHeight); } // render the curve if (showCurveProperty.get()) { gc.setStroke(curveColorProperty.get()); for (int j = 0; j < vertices.size() - 3; j += 2) { gc.strokeLine(vertices.get(j).x, vertices.get(j).y, vertices.get(j + 2).x, vertices.get(j + 2).y); 
} } } else { // render the times timeseries.forEach(ts -> { List<Vertex> vertices = mapVertices.get(ts); gc.setStroke(ts.getColor()); for (int v = 0; v < vertices.size() - 1; v += 2) { gc.strokeLine(vertices.get(v).x, vertices.get(v).y, vertices.get(v + 1).x, vertices.get(v + 1).y); } }); // render the curve if (showCurveProperty.get()) { gc.setStroke(curveColorProperty.get()); mapVertices.values().forEach(vertices -> { for (int j = 0; j < vertices.size() - 3; j += 2) { gc.strokeLine(vertices.get(j).x, vertices.get(j).y, vertices.get(j + 2).x, vertices.get(j + 2).y); } }); } } } }
slickgraph-core/src/main/java/fr/caladan/slickgraph/SlickGraph.java
package fr.caladan.slickgraph; import java.awt.Toolkit; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.IntStream; import javafx.beans.InvalidationListener; import javafx.beans.property.DoubleProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleDoubleProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.collections.FXCollections; import javafx.collections.ListChangeListener; import javafx.collections.ObservableList; import javafx.event.EventHandler; import javafx.scene.Group; import javafx.scene.canvas.Canvas; import javafx.scene.canvas.GraphicsContext; import javafx.scene.input.InputEvent; import javafx.scene.input.MouseEvent; import javafx.scene.input.ScrollEvent; import javafx.scene.paint.Color; import javafx.stage.Screen; /** * Slick Graph is a binning and smoothing technique for time series visualization. * Slick Graphs mitigate quantization artifacts by using the smallest possible binning intervals, i.e. pixels. * They nonetheless provide smooth variations by using a convolution with a kernel. * The filtered-out information that would be lost by this smoothing step is encoded using the luminance channel. 
*/ public class SlickGraph extends Group { /** Canvas used to render the timeseries */ protected Canvas canvas; public DoubleProperty widthProperty() { return canvas.widthProperty(); } public DoubleProperty heightProperty() { return canvas.heightProperty(); } /** Timeseries to render */ protected ObservableList<Timeseries> timeseries; public List<Timeseries> getTimeseries() { return timeseries; } public void setTimeseries(List<Timeseries> timeseries) { start = timeseries.get(0).getData().get(0); end = timeseries.get(0).getData().get(timeseries.get(0).getData().size() - 1); this.timeseries.clear(); this.timeseries.addAll(timeseries); } /** Bandwidth to use when computing the kernel estimation */ protected SimpleDoubleProperty kernelBandWidthProperty; public SimpleDoubleProperty kernelBandwidthProperty() { return kernelBandWidthProperty; } public void setKernelBandWidth(double kernelBandWidth) { kernelBandWidthProperty.set(Math.max(1., kernelBandWidth)); } public double getKernelBandWidth() { return kernelBandWidthProperty.get(); } /** Start timestamp */ protected double start; /** End timestamp */ protected double end; /** Histogram computed after aggregation based on the pixels */ protected Map<Timeseries, List<Double>> mapHistograms; /** Histogram values after the convolution */ protected Map<Timeseries, List<Double>> mapSmoothedHistogram; /** Vertices of the histogram */ protected List<Vertex> histogramVertices; /** Vertices of the graph */ protected Map<Timeseries, List<Vertex>> mapVertices; /** Horizontal scale factor */ protected SimpleDoubleProperty xScaleProperty; /** Vertical scale factor */ protected SimpleDoubleProperty yScaleProperty; /** Width (in physical pixels) of the canvas */ protected double scaledWidth; /** Height (in physical pixels) of the canvas */ protected double scaledHeight; /** Time cursor */ protected TimeCursor timeCursor; /** Time cursor visibility property */ protected SimpleBooleanProperty timeCursorVisibleProperty; public 
SimpleBooleanProperty timeCursorVisibleProperty() { return timeCursorVisibleProperty; } public boolean isTimeCursorVisible() { return timeCursorVisibleProperty.get(); } public void setTimeCursorVisible(boolean visible) { timeCursorVisibleProperty.set(visible); } /** Indicates whether hide or show the shading representing the difference between the smoothed and the real value */ protected SimpleBooleanProperty showShadingProperty; public SimpleBooleanProperty showShadingProperty() { return showShadingProperty; } public boolean isShadingShown() { return showShadingProperty.get(); } public void setShowShading(boolean showShading) { showShadingProperty.set(showShading); } /** Color of the timeseries outine */ protected SimpleObjectProperty<Color> curveColorProperty; public SimpleObjectProperty<Color> curveColorProperty() { return curveColorProperty; } public Color getCurveColor() { return curveColorProperty.get(); } public void setCurveColor(Color curveColor) { curveColorProperty.set(curveColor); } /** Indicates whether hide or show the curve of the graph */ protected SimpleBooleanProperty showCurveProperty; public SimpleBooleanProperty showCurveProperty() { return showCurveProperty; } public boolean isCurveShown() { return showCurveProperty.get(); } public void setShowCurve(boolean showCurve) { showCurveProperty.set(showCurve); } /** Public default constructor - initializes the properties */ public SlickGraph() { super(); canvas = new Canvas(); getChildren().add(canvas); timeseries = FXCollections.observableArrayList(); kernelBandWidthProperty = new SimpleDoubleProperty(5.0); mapHistograms = new HashMap<Timeseries, List<Double>>(); mapSmoothedHistogram = new HashMap<Timeseries, List<Double>>(); start = -1; end = -1; histogramVertices = new ArrayList<Vertex>(); mapVertices = new HashMap<Timeseries, List<Vertex>>(); xScaleProperty = new SimpleDoubleProperty(1.); yScaleProperty = new SimpleDoubleProperty(1.); timeCursor = new TimeCursor(); getChildren().add(timeCursor); 
timeCursorVisibleProperty = new SimpleBooleanProperty(true); showShadingProperty = new SimpleBooleanProperty(true); showCurveProperty = new SimpleBooleanProperty(true); curveColorProperty = new SimpleObjectProperty<Color>(Color.BLACK); // render the graph when a timeseries is added or removed timeseries.addListener((ListChangeListener.Change<? extends Timeseries> c) -> { computeVertices(); render(); }); canvas.widthProperty().addListener(e -> handleHiDPI()); canvas.heightProperty().addListener(e -> handleHiDPI()); timeCursor.getCursorLine().endYProperty().bind(canvas.heightProperty()); kernelBandWidthProperty.addListener(e -> { computeVertices(); render(); }); // mouse event for the time cursor EventHandler<? super InputEvent> mouseEventHandler = e -> { double x = e instanceof MouseEvent ? ((MouseEvent) e).getX() : ((ScrollEvent) e).getX(); int histogramSize = mapHistograms.get(timeseries.get(0)).size(); if (x < histogramSize) { double value = mapSmoothedHistogram.values().stream() .mapToDouble(h -> h.get((int) (x * xScaleProperty.get()) + (int) Math.floor(3. * kernelBandWidthProperty.get())) * (end - start) / scaledWidth) .sum(); timeCursor.setTooltipText("y = " + value + " "); } // TODO can throw a NullPointerException timeCursor.setPosition(x, mapVertices.get(timeseries.get(timeseries.size() - 1)).get((int) Math.round(x * 2. 
* xScaleProperty.get()) / 2 * 2).y / yScaleProperty.get()); }; addEventHandler(MouseEvent.MOUSE_MOVED, mouseEventHandler); addEventHandler(MouseEvent.MOUSE_DRAGGED, mouseEventHandler); addEventHandler(ScrollEvent.ANY, mouseEventHandler); timeCursor.visibleProperty().bind(timeCursorVisibleProperty); // bind the properties setting the visualization parameters InvalidationListener propertiesListener = e -> render(); showShadingProperty.addListener(propertiesListener); showCurveProperty.addListener(propertiesListener); curveColorProperty.addListener(propertiesListener); } /** * Constructor that initializes the size of the graph * * @param width Width of the graph * @param height Height of the graph */ public SlickGraph(double width, double height) { this(); canvas.setWidth(width); canvas.setHeight(height); } /** * Constructor that initializes the data to visualize * * @param data Data that represent the time serie to render * @throws Exception If the data is not valid (timestamps should be strictly increasing) */ public SlickGraph(List<Double> data) throws Exception { this(); List<Timeseries> timeseries = new ArrayList<Timeseries>(); timeseries.add(new Timeseries(data)); setTimeseries(timeseries); } /** * Constructor that initializes the size of the graph and the data to visualize * * @param width Width of the graph * @param height Height of the graph * @param data Data that represent the time serie to render * @throws Exception If the data is not valid (timestamps should be strictly increasing) */ public SlickGraph(double width, double height, List<Double> data) throws Exception { this(data); canvas.setWidth(width); canvas.setHeight(height); } /** Set the scale on the canvas to have a 1:1 pixel mapping */ protected void handleHiDPI() { double nativeWidth = Toolkit.getDefaultToolkit().getScreenSize().getWidth(); double nativeHeight = Toolkit.getDefaultToolkit().getScreenSize().getHeight(); double screenWidth = Screen.getPrimary().getVisualBounds().getWidth(); double 
screenHeight = Screen.getPrimary().getVisualBounds().getHeight(); double xScale = nativeWidth / screenWidth; double yScale = nativeHeight / screenHeight; scaledWidth = canvas.getWidth() * xScale; scaledHeight = canvas.getHeight() * yScale; // back to scale 1:1 canvas.getGraphicsContext2D().scale(xScaleProperty.get(), yScaleProperty.get()); // set the new scale canvas.getGraphicsContext2D().scale(1. / xScale, 1. / yScale); xScaleProperty.set(xScale); yScaleProperty.set(yScale); // update the view computeVertices(); render(); } /** * Return the list of the array of events corresponding to the bounds of the pixels in a given time window * * @param start Start timestamp of the time window * @param end End timestamp of the time window * @return */ protected double[] buildPixelBounds(double start, double end) { int toTrim = (int) Math.floor(6. * kernelBandWidthProperty.get()); double timeSliceDuration = (end - start) / (scaledWidth + toTrim); double[] pixelBounds = new double[(int) (scaledWidth + 1 + toTrim)]; for (int i = 0; i < pixelBounds.length; i++) { pixelBounds[i] = start + i * timeSliceDuration; } return pixelBounds; } /** * Compute the aggregation of a timeseries based on the pixels * * @param timeseries Timeseries to aggregate */ protected void buildHistogram(Timeseries timeseries) { List<Double> histogram = new ArrayList<Double>(); // build the timestamps at the pixels bounds double[] pixelBounds = buildPixelBounds(start, end); // build the list of indices that correspond to the pixel bounds List<Integer> listIndices = new ArrayList<Integer>(); for (int i = 0; i < pixelBounds.length; i++) { int boundEvent = Collections.binarySearch(timeseries.getData(), pixelBounds[i]); boundEvent = boundEvent >= 0 ? 
boundEvent : -boundEvent - 1; listIndices.add(boundEvent); } for (int i = 0; i < listIndices.size() - 1; i++) { histogram.add((listIndices.get(i + 1) - listIndices.get(i)) / (end - start) * scaledWidth); } mapHistograms.put(timeseries, histogram); } /** * Compute the Gaussian values * * @return Gaussian values */ protected List<Double> gaussianKernel() { double kernelBandWidth = kernelBandWidthProperty.get(); double h = 2. * kernelBandWidth * kernelBandWidth; double v = 1. / (kernelBandWidth * Math.sqrt(2. * Math.PI)); int kernelSize = (int) (Math.ceil(kernelBandWidth * 3) * 2 + 1); List<Double> gaussianValues = new ArrayList<Double>(kernelSize); for (double i = 0; i < kernelSize; i++) { gaussianValues.add(Math.exp(-Math.pow(i - kernelSize / 2, 2) / h) * v); } return gaussianValues; } /** * Convolve the histogram with a statistic kernel for a given timeseries * * @param timeseries Timeseries whose histogram is to convolve */ protected void computeConvolution(Timeseries timeseries) { List<Double> histogram = mapHistograms.get(timeseries); List<Double> smoothedHistogram = new ArrayList<Double>(); // TODO can throw a NullPointerException histogram.forEach(i -> smoothedHistogram.add(0.)); // compute the convolution of the time serie width the kernel List<Double> gaussianValues = gaussianKernel(); for (int i = 2; i < histogram.size(); i++) { for (int k = 0; k < gaussianValues.size(); k++) { int j = (int) Math.ceil(i + k - gaussianValues.size() / 2.); if (j < 0 || j > histogram.size() - 1) { continue; } smoothedHistogram.set(i, smoothedHistogram.get(i) + histogram.get(j) * gaussianValues.get(k)); } } mapSmoothedHistogram.put(timeseries, smoothedHistogram); } /** Compute the vertices for the layered rendering */ protected void computeStackedVertices() { mapVertices.clear(); // TODO can throw a NullPointerException double max = IntStream.range(0, mapSmoothedHistogram.get(timeseries.get(0)).size()) .mapToDouble(i -> mapSmoothedHistogram.values().stream().mapToDouble(h -> 
h.get(i)).sum()) .summaryStatistics() .getMax(); // put the first timeseries at the bottom Timeseries ts = timeseries.get(0); List<Double> smoothedHistogram = mapSmoothedHistogram.get(ts); List<Vertex> vertices = new ArrayList<Vertex>(); for (int i = 0; i < smoothedHistogram.size(); i++) { vertices.add(new Vertex(i, (1. - smoothedHistogram.get(i) / max * .8) * scaledHeight, ts.getColor())); vertices.add(new Vertex(i, scaledHeight)); } mapVertices.put(ts, vertices); // stack the other timeseries for (int i = 1; i < timeseries.size(); i++) { ts = timeseries.get(i); smoothedHistogram = mapSmoothedHistogram.get(ts); vertices = new ArrayList<Vertex>(); List<Vertex> aboveVertices = mapVertices.get(timeseries.get(i - 1)); for (int j = 0; j < smoothedHistogram.size(); j++) { vertices.add(new Vertex(aboveVertices.get(2 * j).x, aboveVertices.get(2 * j).y - smoothedHistogram.get(j) / max * .8 * scaledHeight, ts.getColor())); vertices.add(new Vertex(aboveVertices.get(2 * j).x, aboveVertices.get(2 * j).y, ts.getColor())); } mapVertices.put(ts, vertices); } // trim 3 times the kernel bandwidth at each side int toTrim = (int) (Math.round(3. * kernelBandWidthProperty.get() / 2.) * 2); mapVertices.forEach((t, vt) -> { vt = vt.subList(toTrim, vt.size() - toTrim); vt.forEach(v -> v.x -= toTrim); mapVertices.put(t, vt); }); } /** Compute the vertices of the graph */ protected void computeVertices() { // nothing to do if not shown yet or not data if (canvas.getWidth() == 0. || canvas.getHeight() == 0. || timeseries.isEmpty()) { return; } // aggregate the timeseries timeseries.parallelStream().forEach(ts -> { buildHistogram(ts); computeConvolution(ts); }); computeStackedVertices(); } /** * Perform a zoom * * @param z Y-delta of the zoom */ public void zoom(double z) { double delta = 50. 
* (end - start) / scaledWidth; if (z > 0) { start += delta; end -= delta; } else { start -= delta; end += delta; } computeVertices(); render(); } /** * Perform a pan * * @param deltaX Horizontal displacement of the mouse cursor */ public void pan(double deltaX) { double delta = deltaX * (end - start) / scaledWidth; start += delta; end += delta; computeVertices(); render(); } /** Draw the graph */ protected void render() { // sanity check boolean verticesReady = true; int i = 0; while (i < timeseries.size() && verticesReady) { verticesReady = verticesReady && mapVertices.containsKey(timeseries.get(i)); i++; } if (timeseries == null || timeseries.isEmpty() || !verticesReady) { return; } GraphicsContext gc = canvas.getGraphicsContext2D(); // clear the canvas gc.setFill(Color.WHITE); gc.fillRect(0, 0, scaledWidth, scaledHeight); // render the shading /* if (showShadingProperty.get()) { List<Vertex> vertices = mapVertices.get(timeseries.get(2)); vertices.forEach(v -> { gc.setStroke(v.color); gc.strokeLine(v.x, scaledHeight, v.x, v.y); }); } */ timeseries.forEach(ts -> { List<Vertex> vertices = mapVertices.get(ts); gc.setStroke(ts.getColor()); for (int v = 0; v < vertices.size() - 1; v += 2) { gc.strokeLine(vertices.get(v).x, vertices.get(v).y, vertices.get(v + 1).x, vertices.get(v + 1).y); } }); /* mapVertices.forEach((timeseries, vertices) -> { vertices.forEach(v -> { // System.out.println(v); gc.setStroke(v.color); gc.strokeLine(v.x, scaledHeight, v.x, v.y); }); }); */ // render the curve if (showCurveProperty.get()) { gc.setStroke(Color.BLACK); mapVertices.values().forEach(vertices -> { for (int j = 0; j < vertices.size() - 3; j += 2) { gc.strokeLine(vertices.get(j).x, vertices.get(j).y, vertices.get(j + 2).x, vertices.get(j + 2).y); } }); } } }
Implemented SLG shading rendering for multiple timeseries
slickgraph-core/src/main/java/fr/caladan/slickgraph/SlickGraph.java
Implemented SLG shading rendering for multiple timeseries
<ide><path>lickgraph-core/src/main/java/fr/caladan/slickgraph/SlickGraph.java <ide> showShadingProperty.set(showShading); <ide> } <ide> <add> /** List of alpha values for the SlickGraph shading */ <add> protected List<Double> slgAlphas; <add> <ide> /** Color of the timeseries outine */ <ide> protected SimpleObjectProperty<Color> curveColorProperty; <ide> public SimpleObjectProperty<Color> curveColorProperty() { <ide> getChildren().add(timeCursor); <ide> timeCursorVisibleProperty = new SimpleBooleanProperty(true); <ide> showShadingProperty = new SimpleBooleanProperty(true); <add> slgAlphas = new ArrayList<Double>(); <ide> showCurveProperty = new SimpleBooleanProperty(true); <ide> curveColorProperty = new SimpleObjectProperty<Color>(Color.BLACK); <ide> <ide> double value = mapSmoothedHistogram.values().stream() <ide> .mapToDouble(h -> h.get((int) (x * xScaleProperty.get()) + (int) Math.floor(3. * kernelBandWidthProperty.get())) * (end - start) / scaledWidth) <ide> .sum(); <del> timeCursor.setTooltipText("y = " + value + " "); <add> timeCursor.setTooltipText(" y = " + value + " "); <ide> } <ide> // TODO can throw a NullPointerException <del> timeCursor.setPosition(x, mapVertices.get(timeseries.get(timeseries.size() - 1)).get((int) Math.round(x * 2. * xScaleProperty.get()) / 2 * 2).y / yScaleProperty.get()); <add> int toTrim = (int) (Math.round(3. * kernelBandWidthProperty.get() / 2.) * 2); <add> timeCursor.setPosition(x, mapVertices.get(timeseries.get(timeseries.size() - 1)).get((int) Math.round(x * 2. * xScaleProperty.get()) / 2 * 2 + toTrim).y / yScaleProperty.get()); <ide> }; <ide> addEventHandler(MouseEvent.MOUSE_MOVED, mouseEventHandler); <ide> addEventHandler(MouseEvent.MOUSE_DRAGGED, mouseEventHandler); <ide> }); <ide> } <ide> <add> /** Compute the alpha values used for the SlickGraph shading */ <add> protected void computeSlgAlphas() { <add> slgAlphas.clear(); <add> <add> int toTrim = (int) (Math.round(3. * kernelBandWidthProperty.get() / 2.) 
* 2) / 2; <add> IntStream.range(0, mapHistograms.get(timeseries.get(0)).size()).forEach(i -> { <add> double vh = mapHistograms.values().stream() <add> .mapToDouble(h -> h.get(i)) <add> .sum(); <add> double vsh = mapSmoothedHistogram.values().stream() <add> .mapToDouble(sh -> sh.get(i)) <add> .sum(); <add> <add> slgAlphas.add(vh == 0 ? 0. : 1. / (1. + vsh / vh)); <add> }); <add> <add> slgAlphas = slgAlphas.subList(toTrim, slgAlphas.size() - toTrim); <add> } <add> <ide> /** Compute the vertices of the graph */ <ide> protected void computeVertices() { <ide> // nothing to do if not shown yet or not data <ide> } <ide> <ide> // aggregate the timeseries <del> timeseries.parallelStream().forEach(ts -> { <add> timeseries.stream().forEach(ts -> { <ide> buildHistogram(ts); <ide> computeConvolution(ts); <ide> }); <ide> <ide> computeStackedVertices(); <add> computeSlgAlphas(); <ide> } <ide> <ide> /** <ide> gc.fillRect(0, 0, scaledWidth, scaledHeight); <ide> <ide> // render the shading <del> /* if (showShadingProperty.get()) { <del> List<Vertex> vertices = mapVertices.get(timeseries.get(2)); <del> vertices.forEach(v -> { <del> gc.setStroke(v.color); <del> gc.strokeLine(v.x, scaledHeight, v.x, v.y); <del> }); <del> } */ <del> <del> timeseries.forEach(ts -> { <del> List<Vertex> vertices = mapVertices.get(ts); <del> gc.setStroke(ts.getColor()); <add> if (showShadingProperty.get()) { <add> // render the shading <add> List<Vertex> vertices = mapVertices.get(timeseries.get(timeseries.size() - 1)); <ide> for (int v = 0; v < vertices.size() - 1; v += 2) { <del> gc.strokeLine(vertices.get(v).x, vertices.get(v).y, vertices.get(v + 1).x, vertices.get(v + 1).y); <add> gc.setStroke(Color.rgb(0, 0, 0, slgAlphas.get(v / 2))); <add> gc.strokeLine(vertices.get(v).x, vertices.get(v).y, vertices.get(v + 1).x, scaledHeight); <ide> } <del> }); <del> <del> /* mapVertices.forEach((timeseries, vertices) -> { <del> vertices.forEach(v -> { <del> // System.out.println(v); <del> gc.setStroke(v.color); 
<del> gc.strokeLine(v.x, scaledHeight, v.x, v.y); <del> }); <del> }); */ <del> <del> // render the curve <del> if (showCurveProperty.get()) { <del> gc.setStroke(Color.BLACK); <del> mapVertices.values().forEach(vertices -> { <add> <add> // render the curve <add> if (showCurveProperty.get()) { <add> gc.setStroke(curveColorProperty.get()); <ide> for (int j = 0; j < vertices.size() - 3; j += 2) { <ide> gc.strokeLine(vertices.get(j).x, vertices.get(j).y, vertices.get(j + 2).x, vertices.get(j + 2).y); <ide> } <add> } <add> } else { <add> // render the times <add> timeseries.forEach(ts -> { <add> List<Vertex> vertices = mapVertices.get(ts); <add> gc.setStroke(ts.getColor()); <add> for (int v = 0; v < vertices.size() - 1; v += 2) { <add> gc.strokeLine(vertices.get(v).x, vertices.get(v).y, vertices.get(v + 1).x, vertices.get(v + 1).y); <add> } <ide> }); <add> <add> // render the curve <add> if (showCurveProperty.get()) { <add> gc.setStroke(curveColorProperty.get()); <add> mapVertices.values().forEach(vertices -> { <add> for (int j = 0; j < vertices.size() - 3; j += 2) { <add> gc.strokeLine(vertices.get(j).x, vertices.get(j).y, vertices.get(j + 2).x, vertices.get(j + 2).y); <add> } <add> }); <add> } <ide> } <ide> } <ide>
Java
mit
528fdd6690919a03fe0d97c85c6666492c6ffa93
0
OliverAbdulrahim/Hangman,zorudeam/Hangman
package game; import java.awt.Color; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.Font; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.Toolkit; import java.awt.Window; import java.awt.event.ActionEvent; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.text.NumberFormat; import java.util.ArrayList; import java.util.EventObject; import java.util.List; import java.util.function.Consumer; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Stream; import javafx.scene.control.ColorPicker; import javax.swing.AbstractButton; import javax.swing.BorderFactory; import javax.swing.ButtonGroup; import javax.swing.DefaultComboBoxModel; import javax.swing.DefaultListModel; import javax.swing.GroupLayout; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JPopupMenu; import javax.swing.JRadioButton; import javax.swing.JScrollPane; import javax.swing.JTextField; import javax.swing.LayoutStyle; import javax.swing.SwingConstants; import javax.swing.SwingUtilities; import javax.swing.UIManager; import javax.swing.UnsupportedLookAndFeelException; import javax.swing.WindowConstants; import language.Word; import language.WordProperties; import utilities.functions.StringUtilities; /** * The {@code Hangman_GUI} class provides for a user interface for the * {@code hangman} package. * * @author Oliver Abdulrahim */ public class Hangman_GUI extends JFrame { /** * The cerealVersionUID for this class. 
*/ private static final long serialVersionUID = -4227892083846427803L; private JButton aButton; private JComboBox<String> actorComboBox; private JLabel actorLabel; private JButton bButton; private JButton cButton; private JLabel currentWordLabel; private JPanel currentPanel; private JButton dButton; private JList<Word> dictionaryList; private ButtonGroup difficultyButtonGroup; private JLabel difficultyLabel; private JButton eButton; private JRadioButton easyRadioButton; private JPanel editPanel; private JButton fButton; private JMenu fileMenu; private JMenuItem fileMenuItem; private JPopupMenu.Separator fileSeparator; private JButton gButton; private JPanel gamePanel; private JButton giveUpButton; private JTextField guessedField; private JLabel guessedLabel; private JTextField guessesLeftField; private JLabel guessesLeftLabel; private JButton hButton; private JRadioButton hardRadioButton; private JButton hintButton; private JButton iButton; private JLabel imageLabel; private JButton jButton; private JButton kButton; private JPanel keyboardPanel; private JButton lButton; private JButton mButton; private JRadioButton mediumRadioButton; private JMenuBar menuBar; private JButton nButton; private JMenuItem newMenuItem; private JButton newWordButton; private JButton oButton; private JButton okButton; private JPanel okPanel; private JButton pButton; private JButton qButton; private JButton rButton; private JMenuItem resetGameMenuItem; private JButton sButton; private JFrame settingsFrame; private JMenu settingsMenu; private JMenuItem settingsMenuItem; private JPanel statisticsPanel; private JButton tButton; private JButton uButton; private JButton vButton; private JButton wButton; private JTextField winRateField; private JLabel winRateLabel; private JPanel wordDisplayPanel; private JScrollPane wordScrollPane; private JButton xButton; private JButton yButton; private JButton zButton; private final Hangman game; private int gamesPlayed; private int gamesWon; /** * Creates 
new, default {@code Hangman_GUI} form. */ public Hangman_GUI() { game = new Hangman(); gamesPlayed = 0; gamesWon = 0; initComponents(); addTypingListeners(); addButtonListeners(); setUpDictionaryList(); } /** * Adds action listeners for keyboard input to each component contained * within this object. */ private void addTypingListeners() { applyToAll((Component c) -> { c.addKeyListener(new KeyAdapter() { @Override public void keyReleased(KeyEvent evt) { parseGuess(evt); } }); }); } /** * Adds action listeners for click input to each button on the soft keyboard * of the GUI. */ private void addButtonListeners() { // keyboardPanel only has JButtons - okay to cast applyTo(keyboardPanel, (Component c) -> { ((AbstractButton) c).addActionListener(this :: parseGuess); }); } /** * Recursively returns all components within a given container, including * any children that are also {@code Container}s. * * @param container The component whose components to retrieve. * @return All components within a given container. */ public static List<Component> getAllComponents(Container container) { Component[] components = container.getComponents(); List<Component> compList = new ArrayList<>(); for (Component c : components) { compList.add(c); if (c instanceof Container) { compList.addAll(getAllComponents((Container) c)); } } return compList; } /** * Applies a given {@code Consumer} to every {@code Component} contained in * this object. * * @param action The {@code Consumer} to apply to every {@code Component} * contained in this object. */ private void applyToAll(Consumer<? super Component> action) { List<Component> allComponents = getAllComponents(this); allComponents.stream().forEach(action); } /** * Applies a given {@code Consumer} to every {@code Component} contained in * the specified {@code Container}. * * @param action The {@code Consumer} to apply to every {@code Component} * contained in the specified {@code Container}. 
*/ private static void applyTo(Container container, Consumer<? super Component> action) { Stream.of(container.getComponents()).forEach(action); } /** * Enables or disables all components contained within this object. * * @param state The state to set every {@code Component} to. */ public void setStateOfAll(boolean state) { applyToAll((Component c) -> { c.setEnabled(state); }); } /** * Enables or disables all components contained within the given * {@code Container}. * * @param container The {@code Container} whose components to set the state * of. * @param state The state to set every {@code Component} to. */ public static void setStateOf(Container container, boolean state) { applyTo(container, (Component c) -> { c.setEnabled(state); }); } /** * Sets up the list of words that display each item in the game's * dictionary. */ private void setUpDictionaryList() { dictionaryList.setModel(new DefaultListModel<Word>() { private static final long serialVersionUID = 938467039846L; @Override public int size() { return game.getWords().cacheList().size(); } @Override public int getSize() { return size(); } @Override public Word get(int index) { return game.getWords().cacheList().get(index); } @Override public Word elementAt(int index) { return get(index); } @Override public Word getElementAt(int i) { return get(i); } @Override public Word remove(int index) { return game.getWords().cacheList().remove(index); } @Override public void removeElementAt(int index) { remove(index); } @Override public void add(int index, Word element) { game.getWords().cacheList().add(index, element); } @Override public void addElement(Word element) { add(0, element); } }); } /** * Called from within the constructor to initialize the form. 
*/ private void initComponents() { GridBagConstraints gridBagConstraints; settingsFrame = new JFrame(); okPanel = new JPanel(); okButton = new JButton(); wordDisplayPanel = new JPanel(); wordScrollPane = new JScrollPane(); dictionaryList = new JList<>(); editPanel = new JPanel(); difficultyLabel = new JLabel(); easyRadioButton = new JRadioButton(); mediumRadioButton = new JRadioButton(); hardRadioButton = new JRadioButton(); actorLabel = new JLabel(); actorComboBox = new JComboBox<>(); difficultyButtonGroup = new ButtonGroup(); keyboardPanel = new JPanel(); qButton = new JButton(); wButton = new JButton(); eButton = new JButton(); rButton = new JButton(); tButton = new JButton(); yButton = new JButton(); uButton = new JButton(); iButton = new JButton(); oButton = new JButton(); pButton = new JButton(); aButton = new JButton(); sButton = new JButton(); dButton = new JButton(); fButton = new JButton(); gButton = new JButton(); hButton = new JButton(); jButton = new JButton(); kButton = new JButton(); lButton = new JButton(); zButton = new JButton(); xButton = new JButton(); cButton = new JButton(); vButton = new JButton(); bButton = new JButton(); nButton = new JButton(); mButton = new JButton(); gamePanel = new JPanel(); imageLabel = new JLabel(); currentPanel = new JPanel(); currentWordLabel = new JLabel(); statisticsPanel = new JPanel(); guessesLeftLabel = new JLabel(); guessesLeftField = new JTextField(); winRateLabel = new JLabel(); winRateField = new JTextField(); giveUpButton = new JButton(); guessedField = new JTextField(); guessedLabel = new JLabel(); hintButton = new JButton(); newWordButton = new JButton(); menuBar = new JMenuBar(); fileMenu = new JMenu(); newMenuItem = new JMenuItem(); resetGameMenuItem = new JMenuItem(); fileSeparator = new JPopupMenu.Separator(); fileMenuItem = new JMenuItem(); settingsMenu = new JMenu(); settingsMenuItem = new JMenuItem(); settingsFrame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE); 
settingsFrame.setTitle("Settings"); settingsFrame.setLocationByPlatform(true); settingsFrame.setMaximumSize(new Dimension(374, 309)); settingsFrame.setMinimumSize(new Dimension(374, 309)); settingsFrame.setPreferredSize(new Dimension(374, 309)); settingsFrame.setResizable(false); settingsFrame.setType(Window.Type.POPUP); okPanel.setBorder(BorderFactory.createTitledBorder("")); okButton.setText("OK"); okButton.addActionListener((ActionEvent e) -> { initGame(); settingsFrame.dispose(); }); GroupLayout okPanelLayout = new GroupLayout(okPanel); okPanel.setLayout(okPanelLayout); okPanelLayout.setHorizontalGroup( okPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(okPanelLayout.createSequentialGroup() .addGap(113, 113, 113) .addComponent(okButton, GroupLayout.PREFERRED_SIZE, 115, GroupLayout.PREFERRED_SIZE) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); okPanelLayout.setVerticalGroup( okPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(okPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(okButton) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); wordDisplayPanel.setBorder(BorderFactory.createTitledBorder("Dictionary Words")); wordDisplayPanel.setToolTipText(""); dictionaryList.setFont(new Font("Consolas", 0, 11)); dictionaryList.setToolTipText(NumberFormat.getInstance().format(game.getWords().size()) + " words in this dictionary."); wordScrollPane.setViewportView(dictionaryList); GroupLayout wordDisplayPanelLayout = new GroupLayout(wordDisplayPanel); wordDisplayPanel.setLayout(wordDisplayPanelLayout); wordDisplayPanelLayout.setHorizontalGroup( wordDisplayPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(wordDisplayPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(wordScrollPane, GroupLayout.DEFAULT_SIZE, 177, Short.MAX_VALUE) .addContainerGap()) ); wordDisplayPanelLayout.setVerticalGroup( 
wordDisplayPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(wordDisplayPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(wordScrollPane) .addContainerGap()) ); editPanel.setBorder(BorderFactory.createTitledBorder("Game Options")); editPanel.setToolTipText(""); difficultyLabel.setText("<html><p>Select word difficulty.</p></html>"); difficultyButtonGroup.add(easyRadioButton); easyRadioButton.setText("Easy"); easyRadioButton.addActionListener((e) -> updateGameSettings()); difficultyButtonGroup.add(mediumRadioButton); mediumRadioButton.setSelected(true); mediumRadioButton.setText("Medium"); mediumRadioButton.addActionListener((e) -> updateGameSettings()); difficultyButtonGroup.add(hardRadioButton); hardRadioButton.setText("Hard"); hardRadioButton.addActionListener((e) -> updateGameSettings()); actorLabel.setText("<html><p>Select a set of images to use.</p></html>"); actorComboBox.setModel(new DefaultComboBoxModel<>(Actor.allNames())); GroupLayout editPanelLayout = new GroupLayout(editPanel); editPanel.setLayout(editPanelLayout); editPanelLayout.setHorizontalGroup( editPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(editPanelLayout.createSequentialGroup() .addContainerGap() .addGroup(editPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING, false) .addComponent(actorComboBox, 0, 104, Short.MAX_VALUE) .addComponent(hardRadioButton, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(easyRadioButton, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(difficultyLabel) .addComponent(mediumRadioButton, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(actorLabel, GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); editPanelLayout.linkSize(SwingConstants.HORIZONTAL, new Component[] {actorComboBox, difficultyLabel, easyRadioButton, 
hardRadioButton, mediumRadioButton}); editPanelLayout.setVerticalGroup( editPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(GroupLayout.Alignment.TRAILING, editPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(difficultyLabel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(easyRadioButton) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(mediumRadioButton) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(hardRadioButton) .addGap(13, 13, 13) .addComponent(actorLabel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(actorComboBox, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addContainerGap(13, Short.MAX_VALUE)) ); editPanelLayout.linkSize(SwingConstants.VERTICAL, new Component[] {easyRadioButton, hardRadioButton, mediumRadioButton}); GroupLayout settingsFrameLayout = new GroupLayout(settingsFrame.getContentPane()); settingsFrame.getContentPane().setLayout(settingsFrameLayout); settingsFrameLayout.setHorizontalGroup( settingsFrameLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(settingsFrameLayout.createSequentialGroup() .addContainerGap() .addGroup(settingsFrameLayout.createParallelGroup(GroupLayout.Alignment.LEADING, false) .addComponent(okPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(settingsFrameLayout.createSequentialGroup() .addComponent(wordDisplayPanel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(editPanel, GroupLayout.PREFERRED_SIZE, 131, GroupLayout.PREFERRED_SIZE))) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); 
settingsFrameLayout.setVerticalGroup( settingsFrameLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(settingsFrameLayout.createSequentialGroup() .addContainerGap() .addGroup(settingsFrameLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addComponent(wordDisplayPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(editPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(okPanel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addContainerGap()) ); setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); setTitle("Hangman"); setLocationByPlatform(true); setResizable(false); addKeyListener(new KeyAdapter() { @Override public void keyReleased(KeyEvent evt) { parseGuess(evt); } }); keyboardPanel.setBorder(BorderFactory.createTitledBorder("Keyboard")); keyboardPanel.setEnabled(false); keyboardPanel.setLayout(new GridBagLayout()); qButton.setFont(new Font("Consolas", 0, 12)); qButton.setText("Q"); qButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 0; gridBagConstraints.gridy = 0; keyboardPanel.add(qButton, gridBagConstraints); wButton.setFont(new Font("Consolas", 0, 12)); wButton.setText("W"); wButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 1; gridBagConstraints.gridy = 0; keyboardPanel.add(wButton, gridBagConstraints); eButton.setFont(new Font("Consolas", 0, 12)); eButton.setText("E"); eButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 2; gridBagConstraints.gridy = 0; keyboardPanel.add(eButton, gridBagConstraints); rButton.setFont(new Font("Consolas", 0, 12)); rButton.setText("R"); rButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 3; gridBagConstraints.gridy = 0; 
keyboardPanel.add(rButton, gridBagConstraints); tButton.setFont(new Font("Consolas", 0, 12)); tButton.setText("T"); tButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 4; gridBagConstraints.gridy = 0; keyboardPanel.add(tButton, gridBagConstraints); yButton.setFont(new Font("Consolas", 0, 12)); yButton.setText("Y"); yButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 5; gridBagConstraints.gridy = 0; keyboardPanel.add(yButton, gridBagConstraints); uButton.setFont(new Font("Consolas", 0, 12)); uButton.setText("U"); uButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 6; gridBagConstraints.gridy = 0; keyboardPanel.add(uButton, gridBagConstraints); iButton.setFont(new Font("Consolas", 0, 12)); iButton.setText("I"); iButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 7; gridBagConstraints.gridy = 0; keyboardPanel.add(iButton, gridBagConstraints); oButton.setFont(new Font("Consolas", 0, 12)); oButton.setText("O"); oButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 8; gridBagConstraints.gridy = 0; keyboardPanel.add(oButton, gridBagConstraints); pButton.setFont(new Font("Consolas", 0, 12)); pButton.setText("P"); pButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 9; gridBagConstraints.gridy = 0; keyboardPanel.add(pButton, gridBagConstraints); aButton.setFont(new Font("Consolas", 0, 12)); aButton.setText("A"); aButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 1; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(aButton, gridBagConstraints); sButton.setFont(new Font("Consolas", 0, 12)); sButton.setText("S"); sButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); 
gridBagConstraints.gridx = 2; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(sButton, gridBagConstraints); dButton.setFont(new Font("Consolas", 0, 12)); dButton.setText("D"); dButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 3; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(dButton, gridBagConstraints); fButton.setFont(new Font("Consolas", 0, 12)); fButton.setText("F"); fButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 4; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(fButton, gridBagConstraints); gButton.setFont(new Font("Consolas", 0, 12)); gButton.setText("G"); gButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 5; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(gButton, gridBagConstraints); hButton.setFont(new Font("Consolas", 0, 12)); hButton.setText("H"); hButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 6; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(hButton, gridBagConstraints); jButton.setFont(new Font("Consolas", 0, 12)); jButton.setText("J"); jButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 7; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(jButton, gridBagConstraints); kButton.setFont(new Font("Consolas", 0, 12)); kButton.setText("K"); kButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 8; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(kButton, gridBagConstraints); 
lButton.setFont(new Font("Consolas", 0, 12)); lButton.setText("L"); lButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 9; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(lButton, gridBagConstraints); zButton.setFont(new Font("Consolas", 0, 12)); zButton.setText("Z"); zButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 2; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(zButton, gridBagConstraints); xButton.setFont(new Font("Consolas", 0, 12)); xButton.setText("X"); xButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 3; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(xButton, gridBagConstraints); cButton.setFont(new Font("Consolas", 0, 12)); cButton.setText("C"); cButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 4; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(cButton, gridBagConstraints); vButton.setFont(new Font("Consolas", 0, 12)); vButton.setText("V"); vButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 5; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(vButton, gridBagConstraints); bButton.setFont(new Font("Consolas", 0, 12)); bButton.setText("B"); bButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 6; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(bButton, gridBagConstraints); nButton.setFont(new Font("Consolas", 0, 12)); nButton.setText("N"); nButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 7; 
gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(nButton, gridBagConstraints); mButton.setFont(new Font("Consolas", 0, 12)); mButton.setText("M"); mButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 8; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(mButton, gridBagConstraints); gamePanel.setBorder(BorderFactory.createTitledBorder("Hangman")); gamePanel.setEnabled(false); gamePanel.setPreferredSize(new Dimension(248, 180)); imageLabel.setHorizontalAlignment(SwingConstants.CENTER); GroupLayout gamePanelLayout = new GroupLayout(gamePanel); gamePanel.setLayout(gamePanelLayout); gamePanelLayout.setHorizontalGroup( gamePanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(gamePanelLayout.createSequentialGroup() .addContainerGap() .addComponent(imageLabel, GroupLayout.DEFAULT_SIZE, 193, Short.MAX_VALUE) .addContainerGap()) ); gamePanelLayout.setVerticalGroup( gamePanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(gamePanelLayout.createSequentialGroup() .addContainerGap() .addComponent(imageLabel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addContainerGap()) ); currentPanel.setBorder(BorderFactory.createTitledBorder("Current Word")); currentPanel.setToolTipText(""); currentPanel.setEnabled(false); currentWordLabel.setHorizontalAlignment(SwingConstants.CENTER); currentWordLabel.setText("<html><p>Welcome to Hangman. 
To begin, press " + "<font face = Consolas color=\"black\">File → New Game</font>," + " or you can just stare at the screen.</p></html>"); currentWordLabel.setEnabled(false); GroupLayout currentPanelLayout = new GroupLayout(currentPanel); currentPanel.setLayout(currentPanelLayout); currentPanelLayout.setHorizontalGroup( currentPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(currentPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(currentWordLabel, GroupLayout.PREFERRED_SIZE, 215, GroupLayout.PREFERRED_SIZE) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); currentPanelLayout.setVerticalGroup( currentPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(currentPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(currentWordLabel, GroupLayout.DEFAULT_SIZE, 44, Short.MAX_VALUE) .addContainerGap()) ); statisticsPanel.setBorder(BorderFactory.createTitledBorder("Statistics and Options")); statisticsPanel.setEnabled(false); guessesLeftLabel.setHorizontalAlignment(SwingConstants.RIGHT); guessesLeftLabel.setText("Guesses Left"); guessesLeftLabel.setEnabled(false); guessesLeftField.setEditable(false); guessesLeftField.setBackground(new Color(255, 255, 255)); guessesLeftField.setHorizontalAlignment(JTextField.RIGHT); guessesLeftField.setText("0"); guessesLeftField.setEnabled(false); winRateLabel.setHorizontalAlignment(SwingConstants.RIGHT); winRateLabel.setText("Win Rate"); winRateLabel.setEnabled(false); winRateField.setEditable(false); winRateField.setBackground(new Color(255, 255, 255)); winRateField.setHorizontalAlignment(JTextField.RIGHT); winRateField.setText("0"); winRateField.setEnabled(false); giveUpButton.setText("Give Up"); giveUpButton.setEnabled(false); giveUpButton.addActionListener((e) -> attemptGiveUp()); guessedField.setEditable(false); guessedField.setBackground(new Color(255, 255, 255)); guessedField.setText("None."); guessedField.setEnabled(false); 
guessedLabel.setHorizontalAlignment(SwingConstants.RIGHT); guessedLabel.setText("Guessed Letters"); guessedLabel.setEnabled(false); hintButton.setText("Hint"); hintButton.setEnabled(false); hintButton.addActionListener((e) -> doHint()); newWordButton.setText("New Word"); newWordButton.setEnabled(false); newWordButton.addActionListener((e) -> initGame()); GroupLayout statisticsPanelLayout = new GroupLayout(statisticsPanel); statisticsPanel.setLayout(statisticsPanelLayout); statisticsPanelLayout.setHorizontalGroup( statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(statisticsPanelLayout.createSequentialGroup() .addContainerGap() .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(statisticsPanelLayout.createSequentialGroup() .addComponent(newWordButton, GroupLayout.PREFERRED_SIZE, 111, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(giveUpButton, GroupLayout.PREFERRED_SIZE, 98, GroupLayout.PREFERRED_SIZE)) .addGroup(statisticsPanelLayout.createSequentialGroup() .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(statisticsPanelLayout.createSequentialGroup() .addComponent(winRateLabel) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(winRateField, GroupLayout.PREFERRED_SIZE, 133, GroupLayout.PREFERRED_SIZE)) .addGroup(statisticsPanelLayout.createSequentialGroup() .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.TRAILING) .addComponent(guessesLeftLabel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(guessedLabel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addComponent(guessedField, GroupLayout.PREFERRED_SIZE, 133, 
GroupLayout.PREFERRED_SIZE) .addComponent(guessesLeftField, GroupLayout.PREFERRED_SIZE, 133, GroupLayout.PREFERRED_SIZE))) .addComponent(hintButton, GroupLayout.PREFERRED_SIZE, 215, GroupLayout.PREFERRED_SIZE)) .addGap(0, 0, Short.MAX_VALUE))) .addContainerGap()) ); statisticsPanelLayout.linkSize(SwingConstants.HORIZONTAL, new Component[] {guessedLabel, guessesLeftLabel, winRateLabel}); statisticsPanelLayout.setVerticalGroup( statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(statisticsPanelLayout.createSequentialGroup() .addContainerGap() .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.BASELINE) .addComponent(guessedLabel) .addComponent(guessedField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.BASELINE) .addComponent(guessesLeftLabel) .addComponent(guessesLeftField, GroupLayout.PREFERRED_SIZE, 20, GroupLayout.PREFERRED_SIZE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.BASELINE) .addComponent(winRateLabel) .addComponent(winRateField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)) .addPreferredGap(LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(hintButton) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.BASELINE) .addComponent(newWordButton) .addComponent(giveUpButton)) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); statisticsPanelLayout.linkSize(SwingConstants.VERTICAL, new Component[] {guessedLabel, guessesLeftLabel, winRateLabel}); statisticsPanelLayout.linkSize(SwingConstants.VERTICAL, new Component[] {giveUpButton, hintButton, newWordButton}); fileMenu.setText("File"); newMenuItem.setText("New Game"); 
newMenuItem.addActionListener((e) -> showSettingsFrame()); fileMenu.add(newMenuItem); resetGameMenuItem.setText("Reset Game"); resetGameMenuItem.addActionListener((e) -> tryResetGame()); fileMenu.add(resetGameMenuItem); fileMenu.add(fileSeparator); fileMenuItem.setText("Exit"); fileMenuItem.addActionListener((e) -> System.exit(0)); fileMenu.add(fileMenuItem); menuBar.add(fileMenu); settingsMenu.setText("Options"); settingsMenuItem.setText("Settings"); settingsMenuItem.addActionListener((e) -> showSettingsFrame()); settingsMenu.add(settingsMenuItem); menuBar.add(settingsMenu); setJMenuBar(menuBar); GroupLayout layout = new GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(gamePanel, GroupLayout.PREFERRED_SIZE, 225, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING, false) .addComponent(currentPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(statisticsPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) .addComponent(keyboardPanel, GroupLayout.PREFERRED_SIZE, 478, GroupLayout.PREFERRED_SIZE)) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); layout.setVerticalGroup( layout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING, false) .addGroup(GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addComponent(currentPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) 
.addComponent(statisticsPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)) .addComponent(gamePanel, GroupLayout.DEFAULT_SIZE, 275, Short.MAX_VALUE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(keyboardPanel, GroupLayout.DEFAULT_SIZE, 119, Short.MAX_VALUE) .addContainerGap()) ); pack(); } /** * Serves as a common method for parsing and handling the value of an input * event. * * <p> If given event is a {@code KeyEvent}, but the key pressed is not * alphabetic, this method emits an error beep and does nothing. Otherwise, * this method attempts to make the given move. * * <p> If the given event is not {@code KeyEvent}, this method assumes that * it is an {@code ActionEvent}, which may only originate from a * {@code AbstractButton} instance within this class. This method attempts * to make the given move based on the text contained by the button at which * the event originated. * * @param evt The input event at which the guess occurred. This event is a * common superclass of {@link KeyEvent} and {@link ActionEvent}. * @see #makeMove(char) The next method in the chain of events that occur * when a guess is received from the GUI. */ private void parseGuess(EventObject evt) { // Each button on the GUI soft-keyboard has a String with length one // with its representative character. If the given input is an // ActionEvent, then it is safe to assume that it originated from a // {@code JButton}. char guess; if (evt instanceof ActionEvent) { AbstractButton button = (AbstractButton) evt.getSource(); guess = button.getText().charAt(0); makeMove(guess); button.setEnabled(false); } else { // evt is an instance of KeyEvent guess = ((KeyEvent) evt).getKeyChar(); if (Character.isAlphabetic(guess)) { makeMove(guess); disableButton(guess); } else { Toolkit.getDefaultToolkit().beep(); } } } /** * Finds the {@code JButton} in the {@code keyboardPanel} that represents * the given character parameter and disables it. 
* * @param guess The character guess. */ private void disableButton(char guess) { char sanitizedGuess = Word.sanitizeWord(guess); for (int i = 0; i < keyboardPanel.getComponentCount(); i++) { AbstractButton button = (AbstractButton) keyboardPanel.getComponent(i); char buttonText = Word.sanitizeWord(button.getText().charAt(0)); if (sanitizedGuess == buttonText) { button.setEnabled(false); break; // Only one button for each character, so okay to break } } } /** * Attempts to make a move on the game board. This method updates the game * board appropriately depending on the validity of the guess. * * @param guess The character to attempt to guess. */ private void makeMove(char guess) { boolean valid = game.makeGuess(guess); if (valid) { updateCurrentLabel(); } else { updateImages(); } updateStatistics(); checkGameState(); } /** * Initializes all game variables. */ private void initGame() { // Special clase for when "New Word" is requested when the user has // already guessed characters. if (game.hasAlreadyGuessed() && !game.hasWon()) { lostGame(true); } updateGameSettings(); updateCurrentLabel(); updateImages(); updateStatistics(); setStateOfAll(true); } /** * Asks for user input to reset the game. */ private void tryResetGame() { int reply = showConfirmPane("<html><p>Reset the all scores and the game" + " board back to default?</p></html>", "Reset Confirmation"); if (reply == JOptionPane.YES_OPTION) { guessedField.setText("None."); gamesPlayed = 0; gamesWon = 0; initGame(); } } /** * Attempts to get user input on whether or not to "give up" or throw this * current game. If the user specifies yes, the current game is considered a * loss and the user is shown the correct word. Otherwise, nothing happens. */ private void attemptGiveUp() { int response = showConfirmPane("Really give up and show the word?", "Give Up?"); if (response == JOptionPane.YES_OPTION) { lostGame(false); } } /** * Attempts to give the user a hint. 
*/ private void doHint() { if (game.giveHint()) { disableButton(game.lastGuess()); updateCurrentLabel(); updateImages(); updateStatistics(); checkGameState(); } } /** * Returns the currently selected difficulty. * * @return The currently selected difficulty. */ private WordProperties getUserSelectedDifficulty() { WordProperties difficulty = WordProperties.HARD_WORD; if (easyRadioButton.isSelected()) { difficulty = WordProperties.EASY_WORD; } else if (mediumRadioButton.isSelected()) { difficulty = WordProperties.MEDIUM_WORD; } return difficulty; } /** * Returns the actor currently selected by the user. * * @return The actor currently selected by the user. */ private Actor getUserSelectedActor() { Actor actor = Actor.values()[0]; String selected = actorComboBox.getSelectedItem().toString(); for (Actor a : Actor.values()) { if (a.getName().equals(selected)) { actor = a; break; // Each actor is enumerated only once, so okay to break. } } return actor; } /** * Displays the settings {@code JFrame}. */ private void showSettingsFrame() { settingsFrame.setVisible(true); } /** * Updates the game with the currently given set of user settings. */ private void updateGameSettings() { WordProperties difficulty = getUserSelectedDifficulty(); Actor actor = getUserSelectedActor(); game.resetGame(difficulty); game.setActor(actor); dictionaryList.updateUI(); } /** * Updates the current label which displays the current word to accurately * reflect the state of the game. */ private void updateCurrentLabel() { String formatted = StringUtilities.delimit(game.getCorrectGuesses(), ' '); currentWordLabel.setFont(new Font("Tahoma", Font.BOLD, 16)); currentWordLabel.setText(formatted); } /** * Updates the current set of images on the game panel. */ private void updateImages() { int index = game.maxGuesses() - game.getGuessesRemaining() - 1; if (index < game.maxGuesses()) { imageLabel.setIcon(game.getActor().getImageArray()[index]); } } /** * Updates the statistics display panel. 
*/ private void updateStatistics() { String guessed = StringUtilities.sort(game.getAlreadyGuessed().toUpperCase()); guessed = StringUtilities.formattedToString(guessed); guessedField.setText(guessed); int remaining = game.getGuessesRemaining(); guessesLeftField.setText(remaining + ""); String winRate = StringUtilities.doubleAsPercent((double) gamesWon / gamesPlayed); winRateField.setText(winRate); String gameInfo = "Games won/played : " + gamesWon + '/' + gamesPlayed + '.'; winRateField.setToolTipText(gameInfo); winRateLabel.setToolTipText(gameInfo); String cheaterWord = "The current word is " + game.getCurrentWord() + '.'; currentWordLabel.setToolTipText(cheaterWord); String hintText = hintButton.getText(); if ((game.correctGuessesToWin() == 1 || game.getGuessesRemaining() == 1) && hintButton.isEnabled()) { hintText = "No hints on the last move!"; hintButton.setEnabled(false); } else { int hintsRemaining = game.getHintsRemaining(); if (hintsRemaining > 0) { hintText = "Hint (" + hintsRemaining + ")"; } else { hintText = "Out of hints!"; hintButton.setEnabled(false); } } hintButton.setText(hintText); } /** * Checks if the user has won or lost the game. */ private void checkGameState() { if (game.hasWon()) { wonGame(false); } else if (!game.canGuess()) { lostGame(false); } } /** * Handles the winner state of the game, making the necessary increments to * the games won and played. * * @param quietMode Flag for displaying a message pane. */ private void wonGame(boolean quietMode) { gamesWon++; gamesPlayed++; if (!quietMode) { gameEnded("Nice guessing! \"" + StringUtilities.asSentence(game.getCurrentWord()) + "\" was the correct word!", "Winner!"); } } /** * Handles the loser state of the game, making the necessary increments to * the games played. * * @param quietMode Flag for displaying a message pane. 
*/ private void lostGame(boolean quietMode) { imageLabel.setIcon(game.getActor().getImageArray()[game.maxGuesses() - 1]); gamesPlayed++; if (!quietMode) { gameEnded("Sorry! \"" + StringUtilities.asSentence(game.getCurrentWord()) + "\" was the correct word!", "Loser!"); } } /** * Ensures the GUI is kept properly updated at the end of a game. * * @param message The message to display a message pane with. * @param title The title of the message pane to display. */ private void gameEnded(String message, String title) { String actual = StringUtilities.delimit(game.getCurrentWord(), ' '); updateStatistics(); currentWordLabel.setText(actual); setStateOf(keyboardPanel, false); giveUpButton.setEnabled(false); hintButton.setEnabled(false); hintButton.setText("Hint"); showMessagePane(message, title); } /** * Displays a {@code JOptionPane} confirmation dialog using the given * arguments. * * @param message The message to display on the pane. * @param title The title of the pane. * @return The outcome of the user input. */ private static int showConfirmPane(String message, String title) { return JOptionPane.showConfirmDialog(null, message, title, JOptionPane.INFORMATION_MESSAGE); } /** * Displays a {@code JOptionPane} information window using the given * arguments. * * @param message The message to display on the pane. * @param title The title of the pane. */ private static void showMessagePane(String message, String title) { JOptionPane.showMessageDialog(null, message, title, JOptionPane.INFORMATION_MESSAGE); } /** * Displays a {@code JOptionPane} information window using the given * arguments. * * @param message The message to display on the pane. * @param title The title of the pane. */ private static void showErrorPane(String message, String title) { JOptionPane.showMessageDialog(null, message, title, JOptionPane.ERROR_MESSAGE); } /** * Prompts the user if they would like to start a new game. * * @param message The message to display on the pane. 
* @param title The title of the pane. */ protected void newGameDialog(String message, String title) { int response = showConfirmPane(message, title); if (response == JOptionPane.YES_OPTION) { showSettingsFrame(); } } /** * The main method for this package. Creates and displays a * {@code Hangman_GUI} form. * * @param args The command-line arguments. */ public static void main(String args[]) { // Sets the system look and feel try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) { Logger.getLogger(ColorPicker.class.getName()) .log(Level.SEVERE, "Error with look and feel settings. " + "Check if look and feels are installed correctly", ex); } SwingUtilities.invokeLater(() -> { Hangman_GUI gui = new Hangman_GUI(); gui.setVisible(true); gui.newGameDialog("Would you like to start a new game?", "New Game"); }); } }
src/game/Hangman_GUI.java
package game; import java.awt.Color; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.Font; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.Toolkit; import java.awt.Window; import java.awt.event.ActionEvent; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.text.NumberFormat; import java.util.ArrayList; import java.util.EventObject; import java.util.List; import java.util.function.Consumer; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Stream; import javafx.scene.control.ColorPicker; import javax.swing.AbstractButton; import javax.swing.BorderFactory; import javax.swing.ButtonGroup; import javax.swing.DefaultComboBoxModel; import javax.swing.DefaultListModel; import javax.swing.GroupLayout; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JPopupMenu; import javax.swing.JRadioButton; import javax.swing.JScrollPane; import javax.swing.JTextField; import javax.swing.LayoutStyle; import javax.swing.SwingConstants; import javax.swing.SwingUtilities; import javax.swing.UIManager; import javax.swing.UnsupportedLookAndFeelException; import javax.swing.WindowConstants; import language.Word; import language.WordProperties; import utilities.functions.StringUtilities; /** * The {@code Hangman_GUI} class provides for a user interface for the * {@code hangman} package. * * @author Oliver Abdulrahim */ public class Hangman_GUI extends JFrame { /** * The cerealVersionUID for this class. 
*/ private static final long serialVersionUID = -4227892083846427803L; private JButton aButton; private JComboBox<String> actorComboBox; private JLabel actorLabel; private JButton bButton; private JButton cButton; private JLabel currentWordLabel; private JPanel currentPanel; private JButton dButton; private JList<Word> dictionaryList; private ButtonGroup difficultyButtonGroup; private JLabel difficultyLabel; private JButton eButton; private JRadioButton easyRadioButton; private JPanel editPanel; private JButton fButton; private JMenu fileMenu; private JMenuItem fileMenuItem; private JPopupMenu.Separator fileSeparator; private JButton gButton; private JPanel gamePanel; private JButton giveUpButton; private JTextField guessedField; private JLabel guessedLabel; private JTextField guessesLeftField; private JLabel guessesLeftLabel; private JButton hButton; private JRadioButton hardRadioButton; private JButton hintButton; private JButton iButton; private JLabel imageLabel; private JButton jButton; private JButton kButton; private JPanel keyboardPanel; private JButton lButton; private JButton mButton; private JRadioButton mediumRadioButton; private JMenuBar menuBar; private JButton nButton; private JMenuItem newMenuItem; private JButton newWordButton; private JButton oButton; private JButton okButton; private JPanel okPanel; private JButton pButton; private JButton qButton; private JButton rButton; private JMenuItem resetGameMenuItem; private JButton sButton; private JFrame settingsFrame; private JMenu settingsMenu; private JMenuItem settingsMenuItem; private JPanel statisticsPanel; private JButton tButton; private JButton uButton; private JButton vButton; private JButton wButton; private JTextField winRateField; private JLabel winRateLabel; private JPanel wordDisplayPanel; private JScrollPane wordScrollPane; private JButton xButton; private JButton yButton; private JButton zButton; private final Hangman game; private int gamesPlayed; private int gamesWon; /** * Creates 
new, default {@code Hangman_GUI} form. */ public Hangman_GUI() { game = new Hangman(); gamesPlayed = 0; gamesWon = 0; initComponents(); addTypingListeners(); addButtonListeners(); setUpDictionaryList(); } /** * Adds action listeners for keyboard input to each component contained * within this object. */ private void addTypingListeners() { applyToAll((Component c) -> { c.addKeyListener(new KeyAdapter() { @Override public void keyReleased(KeyEvent evt) { parseGuess(evt); } }); }); } /** * Adds action listeners for click input to each button on the soft keyboard * of the GUI. */ private void addButtonListeners() { // keyboardPanel only has JButtons - okay to cast applyTo(keyboardPanel, (Component c) -> { ((AbstractButton) c).addActionListener(this :: parseGuess); }); } /** * Recursively returns all components within a given container, including * any children that are also {@code Container}s. * * @param container The component whose components to retrieve. * @return All components within a given container. */ public static List<Component> getAllComponents(Container container) { Component[] components = container.getComponents(); List<Component> compList = new ArrayList<>(); for (Component c : components) { compList.add(c); if (c instanceof Container) { compList.addAll(getAllComponents((Container) c)); } } return compList; } /** * Applies a given {@code Consumer} to every {@code Component} contained in * this object. * * @param action The {@code Consumer} to apply to every {@code Component} * contained in this object. */ private void applyToAll(Consumer<? super Component> action) { List<Component> allComponents = getAllComponents(this); allComponents.stream().forEach(action); } /** * Applies a given {@code Consumer} to every {@code Component} contained in * the specified {@code Container}. * * @param action The {@code Consumer} to apply to every {@code Component} * contained in the specified {@code Container}. 
*/ private static void applyTo(Container container, Consumer<? super Component> action) { Stream.of(container.getComponents()).forEach(action); } /** * Enables or disables all components contained within this object. * * @param state The state to set every {@code Component} to. */ public void setStateOfAll(boolean state) { applyToAll((Component c) -> { c.setEnabled(state); }); } /** * Enables or disables all components contained within the given * {@code Container}. * * @param container The {@code Container} whose components to set the state * of. * @param state The state to set every {@code Component} to. */ public static void setStateOf(Container container, boolean state) { applyTo(container, (Component c) -> { c.setEnabled(state); }); } /** * Sets up the list of words that display each item in the game's * dictionary. */ private void setUpDictionaryList() { dictionaryList.setModel(new DefaultListModel<Word>() { private static final long serialVersionUID = 938467039846L; @Override public int size() { return game.getWords().cacheList().size(); } @Override public int getSize() { return size(); } @Override public Word get(int index) { return game.getWords().cacheList().get(index); } @Override public Word elementAt(int index) { return get(index); } @Override public Word getElementAt(int i) { return get(i); } @Override public Word remove(int index) { return game.getWords().cacheList().remove(index); } @Override public void removeElementAt(int index) { remove(index); } @Override public void add(int index, Word element) { game.getWords().cacheList().add(index, element); } @Override public void addElement(Word element) { add(0, element); } }); } /** * Called from within the constructor to initialize the form. 
*/ private void initComponents() { GridBagConstraints gridBagConstraints; settingsFrame = new JFrame(); okPanel = new JPanel(); okButton = new JButton(); wordDisplayPanel = new JPanel(); wordScrollPane = new JScrollPane(); dictionaryList = new JList<>(); editPanel = new JPanel(); difficultyLabel = new JLabel(); easyRadioButton = new JRadioButton(); mediumRadioButton = new JRadioButton(); hardRadioButton = new JRadioButton(); actorLabel = new JLabel(); actorComboBox = new JComboBox<>(); difficultyButtonGroup = new ButtonGroup(); keyboardPanel = new JPanel(); qButton = new JButton(); wButton = new JButton(); eButton = new JButton(); rButton = new JButton(); tButton = new JButton(); yButton = new JButton(); uButton = new JButton(); iButton = new JButton(); oButton = new JButton(); pButton = new JButton(); aButton = new JButton(); sButton = new JButton(); dButton = new JButton(); fButton = new JButton(); gButton = new JButton(); hButton = new JButton(); jButton = new JButton(); kButton = new JButton(); lButton = new JButton(); zButton = new JButton(); xButton = new JButton(); cButton = new JButton(); vButton = new JButton(); bButton = new JButton(); nButton = new JButton(); mButton = new JButton(); gamePanel = new JPanel(); imageLabel = new JLabel(); currentPanel = new JPanel(); currentWordLabel = new JLabel(); statisticsPanel = new JPanel(); guessesLeftLabel = new JLabel(); guessesLeftField = new JTextField(); winRateLabel = new JLabel(); winRateField = new JTextField(); giveUpButton = new JButton(); guessedField = new JTextField(); guessedLabel = new JLabel(); hintButton = new JButton(); newWordButton = new JButton(); menuBar = new JMenuBar(); fileMenu = new JMenu(); newMenuItem = new JMenuItem(); resetGameMenuItem = new JMenuItem(); fileSeparator = new JPopupMenu.Separator(); fileMenuItem = new JMenuItem(); settingsMenu = new JMenu(); settingsMenuItem = new JMenuItem(); settingsFrame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE); 
settingsFrame.setTitle("Settings"); settingsFrame.setLocationByPlatform(true); settingsFrame.setMaximumSize(new Dimension(374, 309)); settingsFrame.setMinimumSize(new Dimension(374, 309)); settingsFrame.setPreferredSize(new Dimension(374, 309)); settingsFrame.setResizable(false); settingsFrame.setType(Window.Type.POPUP); okPanel.setBorder(BorderFactory.createTitledBorder("")); okButton.setText("OK"); okButton.addActionListener((ActionEvent e) -> { initGame(); settingsFrame.dispose(); }); GroupLayout okPanelLayout = new GroupLayout(okPanel); okPanel.setLayout(okPanelLayout); okPanelLayout.setHorizontalGroup( okPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(okPanelLayout.createSequentialGroup() .addGap(113, 113, 113) .addComponent(okButton, GroupLayout.PREFERRED_SIZE, 115, GroupLayout.PREFERRED_SIZE) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); okPanelLayout.setVerticalGroup( okPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(okPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(okButton) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); wordDisplayPanel.setBorder(BorderFactory.createTitledBorder("Dictionary Words")); wordDisplayPanel.setToolTipText(""); dictionaryList.setFont(new Font("Consolas", 0, 11)); dictionaryList.setToolTipText(NumberFormat.getInstance().format(game.getWords().size()) + " words in this dictionary."); wordScrollPane.setViewportView(dictionaryList); GroupLayout wordDisplayPanelLayout = new GroupLayout(wordDisplayPanel); wordDisplayPanel.setLayout(wordDisplayPanelLayout); wordDisplayPanelLayout.setHorizontalGroup( wordDisplayPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(wordDisplayPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(wordScrollPane, GroupLayout.DEFAULT_SIZE, 177, Short.MAX_VALUE) .addContainerGap()) ); wordDisplayPanelLayout.setVerticalGroup( 
wordDisplayPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(wordDisplayPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(wordScrollPane) .addContainerGap()) ); editPanel.setBorder(BorderFactory.createTitledBorder("Game Options")); editPanel.setToolTipText(""); difficultyLabel.setText("<html><p>Select word difficulty.</p></html>"); difficultyButtonGroup.add(easyRadioButton); easyRadioButton.setText("Easy"); easyRadioButton.addActionListener((e) -> updateGameSettings()); difficultyButtonGroup.add(mediumRadioButton); mediumRadioButton.setSelected(true); mediumRadioButton.setText("Medium"); mediumRadioButton.addActionListener((e) -> updateGameSettings()); difficultyButtonGroup.add(hardRadioButton); hardRadioButton.setText("Hard"); hardRadioButton.addActionListener((e) -> updateGameSettings()); actorLabel.setText("<html><p>Select a set of images to use.</p></html>"); actorComboBox.setModel(new DefaultComboBoxModel<>(Actor.allNames())); GroupLayout editPanelLayout = new GroupLayout(editPanel); editPanel.setLayout(editPanelLayout); editPanelLayout.setHorizontalGroup( editPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(editPanelLayout.createSequentialGroup() .addContainerGap() .addGroup(editPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING, false) .addComponent(actorComboBox, 0, 104, Short.MAX_VALUE) .addComponent(hardRadioButton, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(easyRadioButton, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(difficultyLabel) .addComponent(mediumRadioButton, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(actorLabel, GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); editPanelLayout.linkSize(SwingConstants.HORIZONTAL, new Component[] {actorComboBox, difficultyLabel, easyRadioButton, 
hardRadioButton, mediumRadioButton}); editPanelLayout.setVerticalGroup( editPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(GroupLayout.Alignment.TRAILING, editPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(difficultyLabel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(easyRadioButton) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(mediumRadioButton) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(hardRadioButton) .addGap(13, 13, 13) .addComponent(actorLabel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(actorComboBox, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addContainerGap(13, Short.MAX_VALUE)) ); editPanelLayout.linkSize(SwingConstants.VERTICAL, new Component[] {easyRadioButton, hardRadioButton, mediumRadioButton}); GroupLayout settingsFrameLayout = new GroupLayout(settingsFrame.getContentPane()); settingsFrame.getContentPane().setLayout(settingsFrameLayout); settingsFrameLayout.setHorizontalGroup( settingsFrameLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(settingsFrameLayout.createSequentialGroup() .addContainerGap() .addGroup(settingsFrameLayout.createParallelGroup(GroupLayout.Alignment.LEADING, false) .addComponent(okPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(settingsFrameLayout.createSequentialGroup() .addComponent(wordDisplayPanel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(editPanel, GroupLayout.PREFERRED_SIZE, 131, GroupLayout.PREFERRED_SIZE))) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); 
settingsFrameLayout.setVerticalGroup( settingsFrameLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(settingsFrameLayout.createSequentialGroup() .addContainerGap() .addGroup(settingsFrameLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addComponent(wordDisplayPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(editPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(okPanel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addContainerGap()) ); setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); setTitle("Hangman"); setLocationByPlatform(true); setResizable(false); addKeyListener(new KeyAdapter() { @Override public void keyReleased(KeyEvent evt) { parseGuess(evt); } }); keyboardPanel.setBorder(BorderFactory.createTitledBorder("Keyboard")); keyboardPanel.setEnabled(false); keyboardPanel.setLayout(new GridBagLayout()); qButton.setFont(new Font("Consolas", 0, 12)); qButton.setText("Q"); qButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 0; gridBagConstraints.gridy = 0; keyboardPanel.add(qButton, gridBagConstraints); wButton.setFont(new Font("Consolas", 0, 12)); wButton.setText("W"); wButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 1; gridBagConstraints.gridy = 0; keyboardPanel.add(wButton, gridBagConstraints); eButton.setFont(new Font("Consolas", 0, 12)); eButton.setText("E"); eButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 2; gridBagConstraints.gridy = 0; keyboardPanel.add(eButton, gridBagConstraints); rButton.setFont(new Font("Consolas", 0, 12)); rButton.setText("R"); rButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 3; gridBagConstraints.gridy = 0; 
keyboardPanel.add(rButton, gridBagConstraints); tButton.setFont(new Font("Consolas", 0, 12)); tButton.setText("T"); tButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 4; gridBagConstraints.gridy = 0; keyboardPanel.add(tButton, gridBagConstraints); yButton.setFont(new Font("Consolas", 0, 12)); yButton.setText("Y"); yButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 5; gridBagConstraints.gridy = 0; keyboardPanel.add(yButton, gridBagConstraints); uButton.setFont(new Font("Consolas", 0, 12)); uButton.setText("U"); uButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 6; gridBagConstraints.gridy = 0; keyboardPanel.add(uButton, gridBagConstraints); iButton.setFont(new Font("Consolas", 0, 12)); iButton.setText("I"); iButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 7; gridBagConstraints.gridy = 0; keyboardPanel.add(iButton, gridBagConstraints); oButton.setFont(new Font("Consolas", 0, 12)); oButton.setText("O"); oButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 8; gridBagConstraints.gridy = 0; keyboardPanel.add(oButton, gridBagConstraints); pButton.setFont(new Font("Consolas", 0, 12)); pButton.setText("P"); pButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 9; gridBagConstraints.gridy = 0; keyboardPanel.add(pButton, gridBagConstraints); aButton.setFont(new Font("Consolas", 0, 12)); aButton.setText("A"); aButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 1; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(aButton, gridBagConstraints); sButton.setFont(new Font("Consolas", 0, 12)); sButton.setText("S"); sButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); 
gridBagConstraints.gridx = 2; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(sButton, gridBagConstraints); dButton.setFont(new Font("Consolas", 0, 12)); dButton.setText("D"); dButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 3; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(dButton, gridBagConstraints); fButton.setFont(new Font("Consolas", 0, 12)); fButton.setText("F"); fButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 4; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(fButton, gridBagConstraints); gButton.setFont(new Font("Consolas", 0, 12)); gButton.setText("G"); gButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 5; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(gButton, gridBagConstraints); hButton.setFont(new Font("Consolas", 0, 12)); hButton.setText("H"); hButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 6; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(hButton, gridBagConstraints); jButton.setFont(new Font("Consolas", 0, 12)); jButton.setText("J"); jButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 7; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(jButton, gridBagConstraints); kButton.setFont(new Font("Consolas", 0, 12)); kButton.setText("K"); kButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 8; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(kButton, gridBagConstraints); 
lButton.setFont(new Font("Consolas", 0, 12)); lButton.setText("L"); lButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 9; gridBagConstraints.gridy = 1; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(lButton, gridBagConstraints); zButton.setFont(new Font("Consolas", 0, 12)); zButton.setText("Z"); zButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 2; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(zButton, gridBagConstraints); xButton.setFont(new Font("Consolas", 0, 12)); xButton.setText("X"); xButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 3; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(xButton, gridBagConstraints); cButton.setFont(new Font("Consolas", 0, 12)); cButton.setText("C"); cButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 4; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(cButton, gridBagConstraints); vButton.setFont(new Font("Consolas", 0, 12)); vButton.setText("V"); vButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 5; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(vButton, gridBagConstraints); bButton.setFont(new Font("Consolas", 0, 12)); bButton.setText("B"); bButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 6; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(bButton, gridBagConstraints); nButton.setFont(new Font("Consolas", 0, 12)); nButton.setText("N"); nButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 7; 
gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(nButton, gridBagConstraints); mButton.setFont(new Font("Consolas", 0, 12)); mButton.setText("M"); mButton.setEnabled(false); gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 8; gridBagConstraints.gridy = 2; gridBagConstraints.insets = new Insets(0, -35, 0, 0); keyboardPanel.add(mButton, gridBagConstraints); gamePanel.setBorder(BorderFactory.createTitledBorder("Hangman")); gamePanel.setEnabled(false); gamePanel.setPreferredSize(new Dimension(248, 180)); imageLabel.setHorizontalAlignment(SwingConstants.CENTER); GroupLayout gamePanelLayout = new GroupLayout(gamePanel); gamePanel.setLayout(gamePanelLayout); gamePanelLayout.setHorizontalGroup( gamePanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(gamePanelLayout.createSequentialGroup() .addContainerGap() .addComponent(imageLabel, GroupLayout.DEFAULT_SIZE, 193, Short.MAX_VALUE) .addContainerGap()) ); gamePanelLayout.setVerticalGroup( gamePanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(gamePanelLayout.createSequentialGroup() .addContainerGap() .addComponent(imageLabel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addContainerGap()) ); currentPanel.setBorder(BorderFactory.createTitledBorder("Current Word")); currentPanel.setToolTipText(""); currentPanel.setEnabled(false); currentWordLabel.setHorizontalAlignment(SwingConstants.CENTER); currentWordLabel.setText("<html><p>Welcome to Hangman. 
To begin, press " + "<font face = Consolas color=\"black\">File → New Game</font>," + " or you can just stare at the screen.</p></html>"); currentWordLabel.setEnabled(false); GroupLayout currentPanelLayout = new GroupLayout(currentPanel); currentPanel.setLayout(currentPanelLayout); currentPanelLayout.setHorizontalGroup( currentPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(currentPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(currentWordLabel, GroupLayout.PREFERRED_SIZE, 215, GroupLayout.PREFERRED_SIZE) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); currentPanelLayout.setVerticalGroup( currentPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(currentPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(currentWordLabel, GroupLayout.DEFAULT_SIZE, 44, Short.MAX_VALUE) .addContainerGap()) ); statisticsPanel.setBorder(BorderFactory.createTitledBorder("Statistics and Options")); statisticsPanel.setEnabled(false); guessesLeftLabel.setHorizontalAlignment(SwingConstants.RIGHT); guessesLeftLabel.setText("Guesses Left"); guessesLeftLabel.setEnabled(false); guessesLeftField.setEditable(false); guessesLeftField.setBackground(new Color(255, 255, 255)); guessesLeftField.setHorizontalAlignment(JTextField.RIGHT); guessesLeftField.setText("0"); guessesLeftField.setEnabled(false); winRateLabel.setHorizontalAlignment(SwingConstants.RIGHT); winRateLabel.setText("Win Rate"); winRateLabel.setEnabled(false); winRateField.setEditable(false); winRateField.setBackground(new Color(255, 255, 255)); winRateField.setHorizontalAlignment(JTextField.RIGHT); winRateField.setText("0"); winRateField.setEnabled(false); giveUpButton.setText("Give Up"); giveUpButton.setEnabled(false); giveUpButton.addActionListener((e) -> attemptGiveUp()); guessedField.setEditable(false); guessedField.setBackground(new Color(255, 255, 255)); guessedField.setText("None."); guessedField.setEnabled(false); 
guessedLabel.setHorizontalAlignment(SwingConstants.RIGHT); guessedLabel.setText("Guessed Letters"); guessedLabel.setEnabled(false); hintButton.setText("Hint"); hintButton.setEnabled(false); hintButton.addActionListener((e) -> doHint()); newWordButton.setText("New Word"); newWordButton.setEnabled(false); newWordButton.addActionListener((e) -> initGame()); GroupLayout statisticsPanelLayout = new GroupLayout(statisticsPanel); statisticsPanel.setLayout(statisticsPanelLayout); statisticsPanelLayout.setHorizontalGroup( statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(statisticsPanelLayout.createSequentialGroup() .addContainerGap() .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(statisticsPanelLayout.createSequentialGroup() .addComponent(newWordButton, GroupLayout.PREFERRED_SIZE, 111, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(giveUpButton, GroupLayout.PREFERRED_SIZE, 98, GroupLayout.PREFERRED_SIZE)) .addGroup(statisticsPanelLayout.createSequentialGroup() .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(statisticsPanelLayout.createSequentialGroup() .addComponent(winRateLabel) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(winRateField, GroupLayout.PREFERRED_SIZE, 133, GroupLayout.PREFERRED_SIZE)) .addGroup(statisticsPanelLayout.createSequentialGroup() .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.TRAILING) .addComponent(guessesLeftLabel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(guessedLabel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addComponent(guessedField, GroupLayout.PREFERRED_SIZE, 133, 
GroupLayout.PREFERRED_SIZE) .addComponent(guessesLeftField, GroupLayout.PREFERRED_SIZE, 133, GroupLayout.PREFERRED_SIZE))) .addComponent(hintButton, GroupLayout.PREFERRED_SIZE, 215, GroupLayout.PREFERRED_SIZE)) .addGap(0, 0, Short.MAX_VALUE))) .addContainerGap()) ); statisticsPanelLayout.linkSize(SwingConstants.HORIZONTAL, new Component[] {guessedLabel, guessesLeftLabel, winRateLabel}); statisticsPanelLayout.setVerticalGroup( statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(statisticsPanelLayout.createSequentialGroup() .addContainerGap() .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.BASELINE) .addComponent(guessedLabel) .addComponent(guessedField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.BASELINE) .addComponent(guessesLeftLabel) .addComponent(guessesLeftField, GroupLayout.PREFERRED_SIZE, 20, GroupLayout.PREFERRED_SIZE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.BASELINE) .addComponent(winRateLabel) .addComponent(winRateField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)) .addPreferredGap(LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(hintButton) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(statisticsPanelLayout.createParallelGroup(GroupLayout.Alignment.BASELINE) .addComponent(newWordButton) .addComponent(giveUpButton)) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); statisticsPanelLayout.linkSize(SwingConstants.VERTICAL, new Component[] {guessedLabel, guessesLeftLabel, winRateLabel}); statisticsPanelLayout.linkSize(SwingConstants.VERTICAL, new Component[] {giveUpButton, hintButton, newWordButton}); fileMenu.setText("File"); newMenuItem.setText("New Game"); 
newMenuItem.addActionListener((e) -> showSettingsFrame()); fileMenu.add(newMenuItem); resetGameMenuItem.setText("Reset Game"); resetGameMenuItem.addActionListener((e) -> tryResetGame()); fileMenu.add(resetGameMenuItem); fileMenu.add(fileSeparator); fileMenuItem.setText("Exit"); fileMenuItem.addActionListener((e) -> System.exit(0)); fileMenu.add(fileMenuItem); menuBar.add(fileMenu); settingsMenu.setText("Options"); settingsMenuItem.setText("Settings"); settingsMenuItem.addActionListener((e) -> showSettingsFrame()); settingsMenu.add(settingsMenuItem); menuBar.add(settingsMenu); setJMenuBar(menuBar); GroupLayout layout = new GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(gamePanel, GroupLayout.PREFERRED_SIZE, 225, GroupLayout.PREFERRED_SIZE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING, false) .addComponent(currentPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(statisticsPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) .addComponent(keyboardPanel, GroupLayout.PREFERRED_SIZE, 478, GroupLayout.PREFERRED_SIZE)) .addContainerGap(GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); layout.setVerticalGroup( layout.createParallelGroup(GroupLayout.Alignment.LEADING) .addGroup(GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING, false) .addGroup(GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addComponent(currentPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) 
.addComponent(statisticsPanel, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)) .addComponent(gamePanel, GroupLayout.DEFAULT_SIZE, 275, Short.MAX_VALUE)) .addPreferredGap(LayoutStyle.ComponentPlacement.RELATED) .addComponent(keyboardPanel, GroupLayout.DEFAULT_SIZE, 119, Short.MAX_VALUE) .addContainerGap()) ); pack(); } /** * Serves as a common method for parsing and handling the value of an input * event. * * <p> If given event is a {@code KeyEvent}, but the key pressed is not * alphabetic, this method emits an error beep and does nothing. Otherwise, * this method attempts to make the given move. * * <p> If the given event is not {@code KeyEvent}, this method assumes that * it is an {@code ActionEvent}, which may only originate from a * {@code AbstractButton} instance within this class. This method attempts * to make the given move based on the text contained by the button at which * the event originated. * * @param evt The input event at which the guess occurred. This event is a * common superclass of {@link KeyEvent} and {@link ActionEvent}. * @see #makeMove(char) The next method in the chain of events that occur * when a guess is received from the GUI. */ private void parseGuess(EventObject evt) { // Each button on the GUI soft-keyboard has a String with length one // with its representative character. If the given input is an // ActionEvent, then it is safe to assume that it originated from a // {@code JButton}. char guess; if (evt instanceof ActionEvent) { AbstractButton button = (AbstractButton) evt.getSource(); guess = button.getText().charAt(0); makeMove(guess); button.setEnabled(false); } else { // evt is an instance of KeyEvent guess = ((KeyEvent) evt).getKeyChar(); if (Character.isAlphabetic(guess)) { makeMove(guess); disableButton(guess); } else { Toolkit.getDefaultToolkit().beep(); } } } /** * Finds the {@code JButton} in the {@code keyboardPanel} that represents * the given character parameter and disables it. 
* * @param guess The character guess. */ private void disableButton(char guess) { char sanitizedGuess = Word.sanitizeWord(guess); for (int i = 0; i < keyboardPanel.getComponentCount(); i++) { AbstractButton button = (AbstractButton) keyboardPanel.getComponent(i); char buttonText = Word.sanitizeWord(button.getText().charAt(0)); if (sanitizedGuess == buttonText) { button.setEnabled(false); break; // Only one button for each character, so okay to break } } } /** * Attempts to make a move on the game board. This method updates the game * board appropriately depending on the validity of the guess. * * @param guess The character to attempt to guess. */ private void makeMove(char guess) { boolean valid = game.makeGuess(guess); if (valid) { updateCurrentLabel(); } else { updateImages(); } updateStatistics(); checkGameState(); } /** * Initializes all game variables. */ private void initGame() { // Special clase for when "New Word" is requested when the user has // already guessed characters. if (game.hasAlreadyGuessed() && !game.hasWon()) { lostGame(true); } updateGameSettings(); updateCurrentLabel(); updateImages(); updateStatistics(); setStateOfAll(true); } /** * Asks for user input to reset the game. */ private void tryResetGame() { int reply = showConfirmPane("<html><p>Reset the all scores and the game" + " board back to default?</p></html>", "Reset Confirmation"); if (reply == JOptionPane.YES_OPTION) { guessedField.setText("None."); gamesPlayed = 0; gamesWon = 0; initGame(); } } /** * Attempts to get user input on whether or not to "give up" or throw this * current game. If the user specifies yes, the current game is considered a * loss and the user is shown the correct word. Otherwise, nothing happens. */ private void attemptGiveUp() { int response = showConfirmPane("Really give up and show the word?", "Give Up?"); if (response == JOptionPane.YES_OPTION) { lostGame(false); } } /** * Attempts to give the user a hint. 
*/ private void doHint() { if (game.giveHint()) { disableButton(game.lastGuess()); updateCurrentLabel(); updateImages(); updateStatistics(); checkGameState(); } } /** * Returns the currently selected difficulty. * * @return The currently selected difficulty. */ private WordProperties getUserSelectedDifficulty() { WordProperties difficulty = WordProperties.HARD_WORD; if (easyRadioButton.isSelected()) { difficulty = WordProperties.EASY_WORD; } else if (mediumRadioButton.isSelected()) { difficulty = WordProperties.MEDIUM_WORD; } return difficulty; } /** * Returns the actor currently selected by the user. * * @return The actor currently selected by the user. */ private Actor getUserSelectedActor() { Actor actor = Actor.values()[0]; String selected = actorComboBox.getSelectedItem().toString(); for (Actor a : Actor.values()) { if (a.getName().equals(selected)) { actor = a; break; // Each actor is enumerated only once, so okay to break. } } return actor; } /** * Displays the settings {@code JFrame}. */ private void showSettingsFrame() { settingsFrame.setVisible(true); } /** * Updates the game with the currently given set of user settings. */ private void updateGameSettings() { WordProperties difficulty = getUserSelectedDifficulty(); Actor actor = getUserSelectedActor(); game.resetGame(difficulty); game.setActor(actor); dictionaryList.updateUI(); } /** * Updates the current label which displays the current word to accurately * reflect the state of the game. */ private void updateCurrentLabel() { String formatted = StringUtilities.delimit(game.getCorrectGuesses(), ' '); currentWordLabel.setFont(new Font("Tahoma", Font.BOLD, 16)); currentWordLabel.setText(formatted); } /** * Updates the current set of images on the game panel. */ private void updateImages() { int index = game.maxGuesses() - game.getGuessesRemaining() - 1; if (index < game.maxGuesses()) { imageLabel.setIcon(game.getActor().getImageArray()[index]); } } /** * Updates the statistics display panel. 
*/ private void updateStatistics() { String guessed = StringUtilities.sort(game.getAlreadyGuessed().toUpperCase()); guessed = StringUtilities.formattedToString(guessed); guessedField.setText(guessed); int remaining = game.getGuessesRemaining(); guessesLeftField.setText(remaining + ""); String winRate = StringUtilities.doubleAsPercent((double) gamesWon / gamesPlayed); winRateField.setText(winRate); String gameInfo = "Games won/played : " + gamesWon + '/' + gamesPlayed + '.'; winRateField.setToolTipText(gameInfo); winRateLabel.setToolTipText(gameInfo); String cheaterWord = "The current word is " + game.getCurrentWord() + '.'; currentWordLabel.setToolTipText(cheaterWord); String hintText = "Hint"; if (game.correctGuessesToWin() == 1 || game.getGuessesRemaining() == 1) { hintText = "No hints on the last move!"; hintButton.setEnabled(false); } else { int hintsRemaining = game.getHintsRemaining(); if (hintsRemaining > 0) { hintText = "Hint (" + hintsRemaining + ")"; } } hintButton.setText(hintText); } /** * Checks if the user has won or lost the game. */ private void checkGameState() { if (game.hasWon()) { wonGame(false); } else if (!game.canGuess()) { lostGame(false); } } /** * Handles the winner state of the game, making the necessary increments to * the games won and played. * * @param quietMode Flag for displaying a message pane. */ private void wonGame(boolean quietMode) { gamesWon++; gamesPlayed++; if (!quietMode) { gameEnded("Nice guessing! \"" + StringUtilities.asSentence(game.getCurrentWord()) + "\" was the correct word!", "Winner!"); } } /** * Handles the loser state of the game, making the necessary increments to * the games played. * * @param quietMode Flag for displaying a message pane. */ private void lostGame(boolean quietMode) { imageLabel.setIcon(game.getActor().getImageArray()[game.maxGuesses() - 1]); gamesPlayed++; if (!quietMode) { gameEnded("Sorry! 
\"" + StringUtilities.asSentence(game.getCurrentWord()) + "\" was the correct word!", "Loser!"); } } /** * Ensures the GUI is kept properly updated at the end of a game. * * @param message The message to display a message pane with. * @param title The title of the message pane to display. */ private void gameEnded(String message, String title) { String actual = StringUtilities.delimit(game.getCurrentWord(), ' '); updateStatistics(); currentWordLabel.setText(actual); setStateOf(keyboardPanel, false); giveUpButton.setEnabled(false); hintButton.setEnabled(false); hintButton.setText("Hint"); showMessagePane(message, title); } /** * Displays a {@code JOptionPane} confirmation dialog using the given * arguments. * * @param message The message to display on the pane. * @param title The title of the pane. * @return The outcome of the user input. */ private static int showConfirmPane(String message, String title) { return JOptionPane.showConfirmDialog(null, message, title, JOptionPane.INFORMATION_MESSAGE); } /** * Displays a {@code JOptionPane} information window using the given * arguments. * * @param message The message to display on the pane. * @param title The title of the pane. */ private static void showMessagePane(String message, String title) { JOptionPane.showMessageDialog(null, message, title, JOptionPane.INFORMATION_MESSAGE); } /** * Displays a {@code JOptionPane} information window using the given * arguments. * * @param message The message to display on the pane. * @param title The title of the pane. */ private static void showErrorPane(String message, String title) { JOptionPane.showMessageDialog(null, message, title, JOptionPane.ERROR_MESSAGE); } /** * Prompts the user if they would like to start a new game. * * @param message The message to display on the pane. * @param title The title of the pane. 
*/ protected void newGameDialog(String message, String title) { int response = showConfirmPane(message, title); if (response == JOptionPane.YES_OPTION) { showSettingsFrame(); } } /** * The main method for this package. Creates and displays a * {@code Hangman_GUI} form. * * @param args The command-line arguments. */ public static void main(String args[]) { // Sets the system look and feel try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) { Logger.getLogger(ColorPicker.class.getName()) .log(Level.SEVERE, "Error with look and feel settings. " + "Check if look and feels are installed correctly", ex); } SwingUtilities.invokeLater(() -> { Hangman_GUI gui = new Hangman_GUI(); gui.setVisible(true); gui.newGameDialog("Would you like to start a new game?", "New Game"); }); } }
Fixed a bug where the hint button wouldn't disable after running out of hints
src/game/Hangman_GUI.java
Fixed a bug where the hint button wouldn't disable after running out of hints
<ide><path>rc/game/Hangman_GUI.java <ide> String cheaterWord = "The current word is " + game.getCurrentWord() + '.'; <ide> currentWordLabel.setToolTipText(cheaterWord); <ide> <del> String hintText = "Hint"; <del> if (game.correctGuessesToWin() == 1 || game.getGuessesRemaining() == 1) { <add> String hintText = hintButton.getText(); <add> if ((game.correctGuessesToWin() == 1 || game.getGuessesRemaining() == 1) <add> && hintButton.isEnabled()) { <ide> hintText = "No hints on the last move!"; <ide> hintButton.setEnabled(false); <ide> } <ide> int hintsRemaining = game.getHintsRemaining(); <ide> if (hintsRemaining > 0) { <ide> hintText = "Hint (" + hintsRemaining + ")"; <add> } <add> else { <add> hintText = "Out of hints!"; <add> hintButton.setEnabled(false); <ide> } <ide> } <ide> hintButton.setText(hintText);
Java
apache-2.0
c75668665919b6bfc6716a139bafdd16b7533ad4
0
merlimat/pulsar,massakam/pulsar,yahoo/pulsar,merlimat/pulsar,massakam/pulsar,massakam/pulsar,merlimat/pulsar,yahoo/pulsar,merlimat/pulsar,merlimat/pulsar,massakam/pulsar,massakam/pulsar,massakam/pulsar,yahoo/pulsar,merlimat/pulsar,yahoo/pulsar,yahoo/pulsar,yahoo/pulsar
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.client.impl; import static com.google.common.base.Preconditions.checkArgument; import static com.scurrilous.circe.checksum.Crc32cIntChecksum.computeChecksum; import static org.apache.pulsar.common.api.Commands.hasChecksum; import static org.apache.pulsar.common.api.Commands.readChecksum; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Iterables; import io.netty.buffer.ByteBuf; import io.netty.util.Timeout; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import 
java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.pulsar.client.api.Consumer; import org.apache.pulsar.client.api.ConsumerCryptoFailureAction; import org.apache.pulsar.client.api.ConsumerStats; import org.apache.pulsar.client.api.DeadLetterPolicy; import org.apache.pulsar.client.api.Message; import org.apache.pulsar.client.api.MessageId; import org.apache.pulsar.client.api.Producer; import org.apache.pulsar.client.api.PulsarClientException; import org.apache.pulsar.client.api.Schema; import org.apache.pulsar.client.api.SubscriptionInitialPosition; import org.apache.pulsar.client.api.SubscriptionType; import org.apache.pulsar.client.impl.conf.ConsumerConfigurationData; import org.apache.pulsar.common.api.Commands; import org.apache.pulsar.common.api.EncryptionContext; import org.apache.pulsar.common.api.EncryptionContext.EncryptionKey; import org.apache.pulsar.common.api.proto.PulsarApi; import org.apache.pulsar.common.api.proto.PulsarApi.CommandAck.AckType; import org.apache.pulsar.common.api.proto.PulsarApi.CommandAck.ValidationError; import org.apache.pulsar.common.api.proto.PulsarApi.CommandSubscribe.InitialPosition; import org.apache.pulsar.common.api.proto.PulsarApi.CompressionType; import org.apache.pulsar.common.api.proto.PulsarApi.EncryptionKeys; import org.apache.pulsar.common.api.proto.PulsarApi.KeyValue; import org.apache.pulsar.common.api.proto.PulsarApi.MessageIdData; import org.apache.pulsar.common.api.proto.PulsarApi.MessageMetadata; import org.apache.pulsar.common.api.proto.PulsarApi.ProtocolVersion; import org.apache.pulsar.common.compression.CompressionCodec; import org.apache.pulsar.common.compression.CompressionCodecProvider; import org.apache.pulsar.common.naming.TopicName; import org.apache.pulsar.common.schema.SchemaInfo; import org.apache.pulsar.common.schema.SchemaType; import org.apache.pulsar.common.util.FutureUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class 
ConsumerImpl<T> extends ConsumerBase<T> implements ConnectionHandler.Connection { private static final int MAX_REDELIVER_UNACKNOWLEDGED = 1000; final long consumerId; // Number of messages that have delivered to the application. Every once in a while, this number will be sent to the // broker to notify that we are ready to get (and store in the incoming messages queue) more messages @SuppressWarnings("rawtypes") private static final AtomicIntegerFieldUpdater<ConsumerImpl> AVAILABLE_PERMITS_UPDATER = AtomicIntegerFieldUpdater .newUpdater(ConsumerImpl.class, "availablePermits"); @SuppressWarnings("unused") private volatile int availablePermits = 0; protected volatile MessageId lastDequeuedMessage = MessageId.earliest; private volatile MessageId lastMessageIdInBroker = MessageId.earliest; private long subscribeTimeout; private final int partitionIndex; private final int receiverQueueRefillThreshold; private final ReadWriteLock lock = new ReentrantReadWriteLock(); private final UnAckedMessageTracker unAckedMessageTracker; private final AcknowledgmentsGroupingTracker acknowledgmentsGroupingTracker; private final NegativeAcksTracker negativeAcksTracker; protected final ConsumerStatsRecorder stats; private final int priorityLevel; private final SubscriptionMode subscriptionMode; private volatile BatchMessageIdImpl startMessageId; private volatile boolean hasReachedEndOfTopic; private final MessageCrypto msgCrypto; private final Map<String, String> metadata; private final boolean readCompacted; private final SubscriptionInitialPosition subscriptionInitialPosition; private final ConnectionHandler connectionHandler; private final TopicName topicName; private final String topicNameWithoutPartition; private final Map<MessageIdImpl, List<MessageImpl<T>>> possibleSendToDeadLetterTopicMessages; private final DeadLetterPolicy deadLetterPolicy; private Producer<T> deadLetterProducer; private final long backoffIntervalNanos; private final long maxBackoffIntervalNanos; protected 
volatile boolean paused; enum SubscriptionMode { // Make the subscription to be backed by a durable cursor that will retain messages and persist the current // position Durable, // Lightweight subscription mode that doesn't have a durable cursor associated NonDurable } static <T> ConsumerImpl<T> newConsumerImpl(PulsarClientImpl client, String topic, ConsumerConfigurationData<T> conf, ExecutorService listenerExecutor, int partitionIndex, CompletableFuture<Consumer<T>> subscribeFuture, SubscriptionMode subscriptionMode, MessageId startMessageId, Schema<T> schema, ConsumerInterceptors<T> interceptors) { return ConsumerImpl.newConsumerImpl(client, topic, conf, listenerExecutor, partitionIndex, subscribeFuture, subscriptionMode, startMessageId, schema, interceptors, Backoff.DEFAULT_INTERVAL_IN_NANOSECONDS, Backoff.MAX_BACKOFF_INTERVAL_NANOSECONDS); } static <T> ConsumerImpl<T> newConsumerImpl(PulsarClientImpl client, String topic, ConsumerConfigurationData<T> conf, ExecutorService listenerExecutor, int partitionIndex, CompletableFuture<Consumer<T>> subscribeFuture, SubscriptionMode subscriptionMode, MessageId startMessageId, Schema<T> schema, ConsumerInterceptors<T> interceptors, long backoffIntervalNanos, long maxBackoffIntervalNanos) { if (conf.getReceiverQueueSize() == 0) { return new ZeroQueueConsumerImpl<>(client, topic, conf, listenerExecutor, partitionIndex, subscribeFuture, subscriptionMode, startMessageId, schema, interceptors, backoffIntervalNanos, maxBackoffIntervalNanos); } else { return new ConsumerImpl<>(client, topic, conf, listenerExecutor, partitionIndex, subscribeFuture, subscriptionMode, startMessageId, schema, interceptors, backoffIntervalNanos, maxBackoffIntervalNanos); } } protected ConsumerImpl(PulsarClientImpl client, String topic, ConsumerConfigurationData<T> conf, ExecutorService listenerExecutor, int partitionIndex, CompletableFuture<Consumer<T>> subscribeFuture, SubscriptionMode subscriptionMode, MessageId startMessageId, Schema<T> schema, 
ConsumerInterceptors<T> interceptors, long backoffIntervalNanos, long maxBackoffIntervalNanos) { super(client, topic, conf, conf.getReceiverQueueSize(), listenerExecutor, subscribeFuture, schema, interceptors); this.consumerId = client.newConsumerId(); this.subscriptionMode = subscriptionMode; this.startMessageId = startMessageId != null ? new BatchMessageIdImpl((MessageIdImpl) startMessageId) : null; AVAILABLE_PERMITS_UPDATER.set(this, 0); this.subscribeTimeout = System.currentTimeMillis() + client.getConfiguration().getOperationTimeoutMs(); this.partitionIndex = partitionIndex; this.receiverQueueRefillThreshold = conf.getReceiverQueueSize() / 2; this.priorityLevel = conf.getPriorityLevel(); this.readCompacted = conf.isReadCompacted(); this.subscriptionInitialPosition = conf.getSubscriptionInitialPosition(); this.negativeAcksTracker = new NegativeAcksTracker(this, conf); if (client.getConfiguration().getStatsIntervalSeconds() > 0) { stats = new ConsumerStatsRecorderImpl(client, conf, this); } else { stats = ConsumerStatsDisabled.INSTANCE; } if (conf.getAckTimeoutMillis() != 0) { if (conf.getTickDurationMillis() > 0) { this.unAckedMessageTracker = new UnAckedMessageTracker(client, this, conf.getAckTimeoutMillis(), Math.min(conf.getTickDurationMillis(), conf.getAckTimeoutMillis())); } else { this.unAckedMessageTracker = new UnAckedMessageTracker(client, this, conf.getAckTimeoutMillis()); } } else { this.unAckedMessageTracker = UnAckedMessageTracker.UNACKED_MESSAGE_TRACKER_DISABLED; } // Create msgCrypto if not created already if (conf.getCryptoKeyReader() != null) { this.msgCrypto = new MessageCrypto(String.format("[%s] [%s]", topic, subscription), false); } else { this.msgCrypto = null; } if (conf.getProperties().isEmpty()) { metadata = Collections.emptyMap(); } else { metadata = Collections.unmodifiableMap(new HashMap<>(conf.getProperties())); } this.connectionHandler = new ConnectionHandler(this, new BackoffBuilder() .setInitialTime(100, TimeUnit.MILLISECONDS) 
.setMax(60, TimeUnit.SECONDS) .setMandatoryStop(0, TimeUnit.MILLISECONDS) .useUserConfiguredIntervals(backoffIntervalNanos, maxBackoffIntervalNanos) .create(), this); this.topicName = TopicName.get(topic); if (this.topicName.isPersistent()) { this.acknowledgmentsGroupingTracker = new PersistentAcknowledgmentsGroupingTracker(this, conf, client.eventLoopGroup()); } else { this.acknowledgmentsGroupingTracker = NonPersistentAcknowledgmentGroupingTracker.of(); } if (conf.getDeadLetterPolicy() != null) { possibleSendToDeadLetterTopicMessages = new ConcurrentHashMap<>(); if (StringUtils.isNotBlank(conf.getDeadLetterPolicy().getDeadLetterTopic())) { this.deadLetterPolicy = DeadLetterPolicy.builder() .maxRedeliverCount(conf.getDeadLetterPolicy().getMaxRedeliverCount()) .deadLetterTopic(conf.getDeadLetterPolicy().getDeadLetterTopic()) .build(); } else { this.deadLetterPolicy = DeadLetterPolicy.builder() .maxRedeliverCount(conf.getDeadLetterPolicy().getMaxRedeliverCount()) .deadLetterTopic(String.format("%s-%s-DLQ", topic, subscription)) .build(); } } else { deadLetterPolicy = null; possibleSendToDeadLetterTopicMessages = null; } this.backoffIntervalNanos = backoffIntervalNanos; this.maxBackoffIntervalNanos = maxBackoffIntervalNanos; topicNameWithoutPartition = topicName.getPartitionedTopicName(); grabCnx(); } public ConnectionHandler getConnectionHandler() { return connectionHandler; } public UnAckedMessageTracker getUnAckedMessageTracker() { return unAckedMessageTracker; } @Override public CompletableFuture<Void> unsubscribeAsync() { if (getState() == State.Closing || getState() == State.Closed) { return FutureUtil .failedFuture(new PulsarClientException.AlreadyClosedException("Consumer was already closed")); } final CompletableFuture<Void> unsubscribeFuture = new CompletableFuture<>(); if (isConnected()) { setState(State.Closing); long requestId = client.newRequestId(); ByteBuf unsubscribe = Commands.newUnsubscribe(consumerId, requestId); ClientCnx cnx = cnx(); 
cnx.sendRequestWithId(unsubscribe, requestId).thenRun(() -> {
                // Broker confirmed the unsubscribe: tear down all local consumer state.
                cnx.removeConsumer(consumerId);
                unAckedMessageTracker.close();
                if (possibleSendToDeadLetterTopicMessages != null) {
                    possibleSendToDeadLetterTopicMessages.clear();
                }
                client.cleanupConsumer(ConsumerImpl.this);
                log.info("[{}][{}] Successfully unsubscribed from topic", topic, subscription);
                setState(State.Closed);
                unsubscribeFuture.complete(null);
            }).exceptionally(e -> {
                // Unsubscribe failed on the broker side: the consumer remains usable, so
                // roll the state back to Ready instead of Closed.
                log.error("[{}][{}] Failed to unsubscribe: {}", topic, subscription,
                        e.getCause().getMessage());
                setState(State.Ready);
                unsubscribeFuture.completeExceptionally(e.getCause());
                return null;
            });
        } else {
            unsubscribeFuture.completeExceptionally(new PulsarClientException("Not connected to broker"));
        }
        return unsubscribeFuture;
    }

    /**
     * Blocking receive: takes the next message from the incoming queue, runs it through the
     * consumer interceptors and updates permit/ack-timeout bookkeeping before returning it.
     *
     * @return the next (possibly interceptor-transformed) message
     * @throws PulsarClientException if the waiting thread is interrupted
     */
    @Override
    protected Message<T> internalReceive() throws PulsarClientException {
        Message<T> message;
        try {
            message = incomingMessages.take();
            trackMessage(message);
            Message<T> interceptMsg = beforeConsume(message);
            messageProcessed(interceptMsg);
            return interceptMsg;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            stats.incrementNumReceiveFailed();
            throw new PulsarClientException(e);
        }
    }

    /**
     * Non-blocking receive: completes immediately when a message is already queued, otherwise
     * parks the returned future in {@code pendingReceives} so it is completed when the next
     * message arrives.
     */
    @Override
    protected CompletableFuture<Message<T>> internalReceiveAsync() {

        CompletableFuture<Message<T>> result = new CompletableFuture<>();
        Message<T> message = null;
        try {
            // Write lock prevents a concurrent message arrival from racing with the
            // decision to enqueue this future in pendingReceives.
            lock.writeLock().lock();
            message = incomingMessages.poll(0, TimeUnit.MILLISECONDS);
            if (message == null) {
                pendingReceives.add(result);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            result.completeExceptionally(e);
        } finally {
            lock.writeLock().unlock();
        }

        // Complete outside the lock when a message was immediately available.
        if (message != null) {
            trackMessage(message);
            Message<T> interceptMsg = beforeConsume(message);
            messageProcessed(interceptMsg);
            result.complete(interceptMsg);
        }

        return result;
    }

    /**
     * Timed receive: waits up to {@code timeout} for a message; returns {@code null} on timeout.
     */
    @Override
    protected Message<T> internalReceive(int timeout, TimeUnit unit) throws PulsarClientException {
        Message<T> message;
        try {
            message = incomingMessages.poll(timeout, unit);
trackMessage(message); Message<T> interceptMsg = beforeConsume(message); if (interceptMsg != null) { messageProcessed(interceptMsg); } return interceptMsg; } catch (InterruptedException e) { Thread.currentThread().interrupt(); State state = getState(); if (state != State.Closing && state != State.Closed) { stats.incrementNumReceiveFailed(); throw new PulsarClientException(e); } else { return null; } } } boolean markAckForBatchMessage(BatchMessageIdImpl batchMessageId, AckType ackType, Map<String,Long> properties) { boolean isAllMsgsAcked; if (ackType == AckType.Individual) { isAllMsgsAcked = batchMessageId.ackIndividual(); } else { isAllMsgsAcked = batchMessageId.ackCumulative(); } int outstandingAcks = 0; if (log.isDebugEnabled()) { outstandingAcks = batchMessageId.getOutstandingAcksInSameBatch(); } int batchSize = batchMessageId.getBatchSize(); // all messages in this batch have been acked if (isAllMsgsAcked) { if (log.isDebugEnabled()) { log.debug("[{}] [{}] can ack message to broker {}, acktype {}, cardinality {}, length {}", subscription, consumerName, batchMessageId, ackType, outstandingAcks, batchSize); } return true; } else { if (AckType.Cumulative == ackType && !batchMessageId.getAcker().isPrevBatchCumulativelyAcked()) { sendAcknowledge(batchMessageId.prevBatchMessageId(), AckType.Cumulative, properties); batchMessageId.getAcker().setPrevBatchCumulativelyAcked(true); } else { onAcknowledge(batchMessageId, null); } if (log.isDebugEnabled()) { log.debug("[{}] [{}] cannot ack message to broker {}, acktype {}, pending acks - {}", subscription, consumerName, batchMessageId, ackType, outstandingAcks); } } return false; } @Override protected CompletableFuture<Void> doAcknowledge(MessageId messageId, AckType ackType, Map<String,Long> properties) { checkArgument(messageId instanceof MessageIdImpl); if (getState() != State.Ready && getState() != State.Connecting) { stats.incrementNumAcksFailed(); PulsarClientException exception = new PulsarClientException("Consumer 
not ready. State: " + getState()); if (AckType.Individual.equals(ackType)) { onAcknowledge(messageId, exception); } else if (AckType.Cumulative.equals(ackType)) { onAcknowledgeCumulative(messageId, exception); } return FutureUtil.failedFuture(exception); } if (messageId instanceof BatchMessageIdImpl) { if (markAckForBatchMessage((BatchMessageIdImpl) messageId, ackType, properties)) { // all messages in batch have been acked so broker can be acked via sendAcknowledge() if (log.isDebugEnabled()) { log.debug("[{}] [{}] acknowledging message - {}, acktype {}", subscription, consumerName, messageId, ackType); } } else { // other messages in batch are still pending ack. return CompletableFuture.completedFuture(null); } } return sendAcknowledge(messageId, ackType, properties); } private CompletableFuture<Void> sendAcknowledge(MessageId messageId, AckType ackType, Map<String,Long> properties) { MessageIdImpl msgId = (MessageIdImpl) messageId; if (ackType == AckType.Individual) { if (messageId instanceof BatchMessageIdImpl) { BatchMessageIdImpl batchMessageId = (BatchMessageIdImpl) messageId; stats.incrementNumAcksSent(batchMessageId.getBatchSize()); unAckedMessageTracker.remove(new MessageIdImpl(batchMessageId.getLedgerId(), batchMessageId.getEntryId(), batchMessageId.getPartitionIndex())); if (possibleSendToDeadLetterTopicMessages != null) { possibleSendToDeadLetterTopicMessages.remove(new MessageIdImpl(batchMessageId.getLedgerId(), batchMessageId.getEntryId(), batchMessageId.getPartitionIndex())); } } else { // increment counter by 1 for non-batch msg unAckedMessageTracker.remove(msgId); if (possibleSendToDeadLetterTopicMessages != null) { possibleSendToDeadLetterTopicMessages.remove(msgId); } stats.incrementNumAcksSent(1); } onAcknowledge(messageId, null); } else if (ackType == AckType.Cumulative) { onAcknowledgeCumulative(messageId, null); stats.incrementNumAcksSent(unAckedMessageTracker.removeMessagesTill(msgId)); } 
acknowledgmentsGroupingTracker.addAcknowledgment(msgId, ackType, properties);

        // Consumer acknowledgment operation immediately succeeds. In any case, if we're not able to
        // send ack to broker, the messages will be re-delivered
        return CompletableFuture.completedFuture(null);
    }

    /**
     * Negatively acknowledges a message so it will be redelivered after the configured
     * negative-ack redelivery delay.
     */
    @Override
    public void negativeAcknowledge(MessageId messageId) {
        negativeAcksTracker.add(messageId);

        // Ensure the message is not redelivered for ack-timeout, since we did receive an "ack"
        unAckedMessageTracker.remove(messageId);
    }

    /**
     * Invoked by the {@code ConnectionHandler} when a (re)connection to a broker is established:
     * registers this consumer on the new connection, clears the local receiver queue (those
     * messages were never seen by the application) and issues a new Subscribe command.
     */
    @Override
    public void connectionOpened(final ClientCnx cnx) {
        setClientCnx(cnx);
        cnx.registerConsumer(consumerId, this);

        log.info("[{}][{}] Subscribing to topic on cnx {}", topic, subscription, cnx.ctx().channel());

        long requestId = client.newRequestId();

        int currentSize;
        synchronized (this) {
            currentSize = incomingMessages.size();
            // Restart from the first message the application has not yet consumed.
            startMessageId = clearReceiverQueue();
            if (possibleSendToDeadLetterTopicMessages != null) {
                possibleSendToDeadLetterTopicMessages.clear();
            }
        }

        boolean isDurable = subscriptionMode == SubscriptionMode.Durable;
        MessageIdData startMessageIdData;
        if (isDurable) {
            // For regular durable subscriptions, the message id from where to restart will be determined by the broker.
startMessageIdData = null; } else { // For non-durable we are going to restart from the next entry MessageIdData.Builder builder = MessageIdData.newBuilder(); builder.setLedgerId(startMessageId.getLedgerId()); builder.setEntryId(startMessageId.getEntryId()); if (startMessageId instanceof BatchMessageIdImpl) { builder.setBatchIndex(((BatchMessageIdImpl) startMessageId).getBatchIndex()); } startMessageIdData = builder.build(); builder.recycle(); } SchemaInfo si = schema.getSchemaInfo(); if (si != null && (SchemaType.BYTES == si.getType() || SchemaType.NONE == si.getType())) { // don't set schema for Schema.BYTES si = null; } ByteBuf request = Commands.newSubscribe(topic, subscription, consumerId, requestId, getSubType(), priorityLevel, consumerName, isDurable, startMessageIdData, metadata, readCompacted, conf.isReplicateSubscriptionState(), InitialPosition.valueOf(subscriptionInitialPosition.getValue()), si); if (startMessageIdData != null) { startMessageIdData.recycle(); } cnx.sendRequestWithId(request, requestId).thenRun(() -> { synchronized (ConsumerImpl.this) { if (changeToReadyState()) { consumerIsReconnectedToBroker(cnx, currentSize); } else { // Consumer was closed while reconnecting, close the connection to make sure the broker // drops the consumer on its side setState(State.Closed); cnx.removeConsumer(consumerId); cnx.channel().close(); return; } } resetBackoff(); boolean firstTimeConnect = subscribeFuture.complete(this); // if the consumer is not partitioned or is re-connected and is partitioned, we send the flow // command to receive messages. 
// For readers too (isDurable==false), the partition idx will be set though we have to // send available permits immediately after establishing the reader session if (!(firstTimeConnect && partitionIndex > -1 && isDurable) && conf.getReceiverQueueSize() != 0) { sendFlowPermitsToBroker(cnx, conf.getReceiverQueueSize()); } }).exceptionally((e) -> { cnx.removeConsumer(consumerId); if (getState() == State.Closing || getState() == State.Closed) { // Consumer was closed while reconnecting, close the connection to make sure the broker // drops the consumer on its side cnx.channel().close(); return null; } log.warn("[{}][{}] Failed to subscribe to topic on {}", topic, subscription, cnx.channel().remoteAddress()); if (e.getCause() instanceof PulsarClientException && getConnectionHandler().isRetriableError((PulsarClientException) e.getCause()) && System.currentTimeMillis() < subscribeTimeout) { reconnectLater(e.getCause()); return null; } if (!subscribeFuture.isDone()) { // unable to create new consumer, fail operation setState(State.Failed); subscribeFuture.completeExceptionally(e); client.cleanupConsumer(this); } else { // consumer was subscribed and connected but we got some error, keep trying reconnectLater(e.getCause()); } return null; }); } protected void consumerIsReconnectedToBroker(ClientCnx cnx, int currentQueueSize) { log.info("[{}][{}] Subscribed to topic on {} -- consumer: {}", topic, subscription, cnx.channel().remoteAddress(), consumerId); AVAILABLE_PERMITS_UPDATER.set(this, 0); } /** * Clear the internal receiver queue and returns the message id of what was the 1st message in the queue that was * not seen by the application */ private BatchMessageIdImpl clearReceiverQueue() { List<Message<?>> currentMessageQueue = new ArrayList<>(incomingMessages.size()); incomingMessages.drainTo(currentMessageQueue); if (!currentMessageQueue.isEmpty()) { MessageIdImpl nextMessageInQueue = (MessageIdImpl) currentMessageQueue.get(0).getMessageId(); BatchMessageIdImpl 
previousMessage; if (nextMessageInQueue instanceof BatchMessageIdImpl) { // Get on the previous message within the current batch previousMessage = new BatchMessageIdImpl(nextMessageInQueue.getLedgerId(), nextMessageInQueue.getEntryId(), nextMessageInQueue.getPartitionIndex(), ((BatchMessageIdImpl) nextMessageInQueue).getBatchIndex() - 1); } else { // Get on previous message in previous entry previousMessage = new BatchMessageIdImpl(nextMessageInQueue.getLedgerId(), nextMessageInQueue.getEntryId() - 1, nextMessageInQueue.getPartitionIndex(), -1); } return previousMessage; } else if (!lastDequeuedMessage.equals(MessageId.earliest)) { // If the queue was empty we need to restart from the message just after the last one that has been dequeued // in the past return new BatchMessageIdImpl((MessageIdImpl) lastDequeuedMessage); } else { // No message was received or dequeued by this consumer. Next message would still be the startMessageId return startMessageId; } } /** * send the flow command to have the broker start pushing messages */ void sendFlowPermitsToBroker(ClientCnx cnx, int numMessages) { if (cnx != null) { if (log.isDebugEnabled()) { log.debug("[{}] [{}] Adding {} additional permits", topic, subscription, numMessages); } cnx.ctx().writeAndFlush(Commands.newFlow(consumerId, numMessages), cnx.ctx().voidPromise()); } } @Override public void connectionFailed(PulsarClientException exception) { if (System.currentTimeMillis() > subscribeTimeout && subscribeFuture.completeExceptionally(exception)) { setState(State.Failed); log.info("[{}] Consumer creation failed for consumer {}", topic, consumerId); client.cleanupConsumer(this); } } @Override public CompletableFuture<Void> closeAsync() { if (getState() == State.Closing || getState() == State.Closed) { unAckedMessageTracker.close(); if (possibleSendToDeadLetterTopicMessages != null) { possibleSendToDeadLetterTopicMessages.clear(); } return CompletableFuture.completedFuture(null); } if (!isConnected()) { log.info("[{}] 
[{}] Closed Consumer (not connected)", topic, subscription); setState(State.Closed); unAckedMessageTracker.close(); if (possibleSendToDeadLetterTopicMessages != null) { possibleSendToDeadLetterTopicMessages.clear(); } client.cleanupConsumer(this); return CompletableFuture.completedFuture(null); } stats.getStatTimeout().ifPresent(Timeout::cancel); setState(State.Closing); acknowledgmentsGroupingTracker.close(); long requestId = client.newRequestId(); CompletableFuture<Void> closeFuture = new CompletableFuture<>(); ClientCnx cnx = cnx(); if (null == cnx) { cleanupAtClose(closeFuture); } else { ByteBuf cmd = Commands.newCloseConsumer(consumerId, requestId); cnx.sendRequestWithId(cmd, requestId).handle((v, exception) -> { cnx.removeConsumer(consumerId); if (exception == null || !cnx.ctx().channel().isActive()) { cleanupAtClose(closeFuture); } else { closeFuture.completeExceptionally(exception); } return null; }); } return closeFuture; } private void cleanupAtClose(CompletableFuture<Void> closeFuture) { log.info("[{}] [{}] Closed consumer", topic, subscription); setState(State.Closed); unAckedMessageTracker.close(); if (possibleSendToDeadLetterTopicMessages != null) { possibleSendToDeadLetterTopicMessages.clear(); } closeFuture.complete(null); client.cleanupConsumer(this); // fail all pending-receive futures to notify application failPendingReceive(); } private void failPendingReceive() { lock.readLock().lock(); try { if (listenerExecutor != null && !listenerExecutor.isShutdown()) { while (!pendingReceives.isEmpty()) { CompletableFuture<Message<T>> receiveFuture = pendingReceives.poll(); if (receiveFuture != null) { receiveFuture.completeExceptionally( new PulsarClientException.AlreadyClosedException("Consumer is already closed")); } else { break; } } } } finally { lock.readLock().unlock(); } } void activeConsumerChanged(boolean isActive) { if (consumerEventListener == null) { return; } listenerExecutor.execute(() -> { if (isActive) { 
consumerEventListener.becameActive(this, partitionIndex); } else { consumerEventListener.becameInactive(this, partitionIndex); } }); } void messageReceived(MessageIdData messageId, int redeliveryCount, ByteBuf headersAndPayload, ClientCnx cnx) { if (log.isDebugEnabled()) { log.debug("[{}][{}] Received message: {}/{}", topic, subscription, messageId.getLedgerId(), messageId.getEntryId()); } if (!verifyChecksum(headersAndPayload, messageId)) { // discard message with checksum error discardCorruptedMessage(messageId, cnx, ValidationError.ChecksumMismatch); return; } MessageMetadata msgMetadata; try { msgMetadata = Commands.parseMessageMetadata(headersAndPayload); } catch (Throwable t) { discardCorruptedMessage(messageId, cnx, ValidationError.ChecksumMismatch); return; } final int numMessages = msgMetadata.getNumMessagesInBatch(); MessageIdImpl msgId = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), getPartitionIndex()); if (acknowledgmentsGroupingTracker.isDuplicate(msgId)) { if (log.isDebugEnabled()) { log.debug("[{}] [{}] Ignoring message as it was already being acked earlier by same consumer {}/{}", topic, subscription, consumerName, msgId); } increaseAvailablePermits(cnx, numMessages); return; } ByteBuf decryptedPayload = decryptPayloadIfNeeded(messageId, msgMetadata, headersAndPayload, cnx); boolean isMessageUndecryptable = isMessageUndecryptable(msgMetadata); if (decryptedPayload == null) { // Message was discarded or CryptoKeyReader isn't implemented return; } // uncompress decryptedPayload and release decryptedPayload-ByteBuf ByteBuf uncompressedPayload = isMessageUndecryptable ? decryptedPayload.retain() : uncompressPayloadIfNeeded(messageId, msgMetadata, decryptedPayload, cnx); decryptedPayload.release(); if (uncompressedPayload == null) { // Message was discarded on decompression error return; } // if message is not decryptable then it can't be parsed as a batch-message. 
// so, add EncryptionCtx to message and return undecrypted payload
        if (isMessageUndecryptable || (numMessages == 1 && !msgMetadata.hasNumMessagesInBatch())) {

            // Non-batch (or undecryptable) entry: wrap it in a single MessageImpl.
            final MessageImpl<T> message = new MessageImpl<>(topicName.toString(), msgId, msgMetadata,
                    uncompressedPayload, createEncryptionContext(msgMetadata), cnx, schema, redeliveryCount);
            uncompressedPayload.release();
            msgMetadata.recycle();

            lock.readLock().lock();
            try {
                // Enqueue the message so that it can be retrieved when application calls receive()
                // if the conf.getReceiverQueueSize() is 0 then discard message if no one is waiting for it.
                // if asyncReceive is waiting then notify callback without adding to incomingMessages queue
                if (deadLetterPolicy != null && possibleSendToDeadLetterTopicMessages != null
                        && redeliveryCount >= deadLetterPolicy.getMaxRedeliverCount()) {
                    possibleSendToDeadLetterTopicMessages.put((MessageIdImpl) message.getMessageId(),
                            Collections.singletonList(message));
                }
                if (!pendingReceives.isEmpty()) {
                    trackMessage(message);
                    notifyPendingReceivedCallback(message, null);
                } else if (canEnqueueMessage(message)) {
                    incomingMessages.add(message);
                }
            } finally {
                lock.readLock().unlock();
            }
        } else {
            // handle batch message enqueuing; uncompressed payload has all messages in batch
            receiveIndividualMessagesFromBatch(msgMetadata, redeliveryCount, uncompressedPayload, messageId, cnx);

            uncompressedPayload.release();
            msgMetadata.recycle();
        }

        if (listener != null) {
            triggerListener(numMessages);
        }
    }

    /**
     * Dispatches up to {@code numMessages} already-queued messages to the configured message
     * listener, one at a time, on the listener executor.
     */
    protected void triggerListener(int numMessages) {
        // Trigger the notification on the message listener in a separate thread to avoid blocking the networking
        // thread while the message processing happens
        listenerExecutor.execute(() -> {
            for (int i = 0; i < numMessages; i++) {
                try {
                    Message<T> msg = internalReceive(0, TimeUnit.MILLISECONDS);
                    // complete the callback-loop in case queue is cleared up
                    if (msg == null) {
                        if (log.isDebugEnabled()) {
                            log.debug("[{}] [{}] Message has been cleared from the queue", topic,
subscription);
                        }
                        break;
                    }
                    try {
                        if (log.isDebugEnabled()) {
                            log.debug("[{}][{}] Calling message listener for message {}", topic, subscription,
                                    msg.getMessageId());
                        }
                        listener.received(ConsumerImpl.this, msg);
                    } catch (Throwable t) {
                        // A throwing listener must not kill the dispatch loop; log and continue.
                        log.error("[{}][{}] Message listener error in processing message: {}", topic, subscription,
                                msg.getMessageId(), t);
                    }
                } catch (PulsarClientException e) {
                    log.warn("[{}] [{}] Failed to dequeue the message for listener", topic, subscription, e);
                    return;
                }
            }
        });
    }

    /**
     * Hook deciding whether a freshly received message may be added to the incoming queue.
     * Default behavior, can be overridden in subclasses.
     */
    protected boolean canEnqueueMessage(Message<T> message) {
        return true;
    }

    /**
     * Notify waiting asyncReceive request with the received message.
     *
     * @param message   message handed to the oldest pending receive future; must be non-null
     *                  unless {@code exception} is set
     * @param exception if non-null, the pending future is failed with this exception instead
     */
    void notifyPendingReceivedCallback(final Message<T> message, Exception exception) {
        if (pendingReceives.isEmpty()) {
            return;
        }

        // fetch receivedCallback from queue
        final CompletableFuture<Message<T>> receivedFuture = pendingReceives.poll();
        if (receivedFuture == null) {
            return;
        }

        if (exception != null) {
            listenerExecutor.execute(() -> receivedFuture.completeExceptionally(exception));
            return;
        }

        if (message == null) {
            IllegalStateException e = new IllegalStateException("received message can't be null");
            listenerExecutor.execute(() -> receivedFuture.completeExceptionally(e));
            return;
        }

        if (conf.getReceiverQueueSize() == 0) {
            // call interceptor and complete received callback
            interceptAndComplete(message, receivedFuture);
            return;
        }

        // increase permits for available message-queue
        messageProcessed(message);
        // call interceptor and complete received callback
        interceptAndComplete(message, receivedFuture);
    }

    /** Runs the beforeConsume interceptor chain, then completes the future on the listener executor. */
    private void interceptAndComplete(final Message<T> message, final CompletableFuture<Message<T>> receivedFuture) {
        // call proper interceptor
        final Message<T> interceptMessage = beforeConsume(message);
        // return message to receivedCallback
        listenerExecutor.execute(() -> receivedFuture.complete(interceptMessage));
    }

    void receiveIndividualMessagesFromBatch(MessageMetadata
msgMetadata, int redeliveryCount, ByteBuf uncompressedPayload, MessageIdData messageId, ClientCnx cnx) { int batchSize = msgMetadata.getNumMessagesInBatch(); // create ack tracker for entry aka batch MessageIdImpl batchMessage = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), getPartitionIndex()); BatchMessageAcker acker = BatchMessageAcker.newAcker(batchSize); List<MessageImpl<T>> possibleToDeadLetter = null; if (deadLetterPolicy != null && redeliveryCount >= deadLetterPolicy.getMaxRedeliverCount()) { possibleToDeadLetter = new ArrayList<>(); } int skippedMessages = 0; try { for (int i = 0; i < batchSize; ++i) { if (log.isDebugEnabled()) { log.debug("[{}] [{}] processing message num - {} in batch", subscription, consumerName, i); } PulsarApi.SingleMessageMetadata.Builder singleMessageMetadataBuilder = PulsarApi.SingleMessageMetadata .newBuilder(); ByteBuf singleMessagePayload = Commands.deSerializeSingleMessageInBatch(uncompressedPayload, singleMessageMetadataBuilder, i, batchSize); if (subscriptionMode == SubscriptionMode.NonDurable && startMessageId != null && messageId.getLedgerId() == startMessageId.getLedgerId() && messageId.getEntryId() == startMessageId.getEntryId() && i <= startMessageId.getBatchIndex()) { // If we are receiving a batch message, we need to discard messages that were prior // to the startMessageId if (log.isDebugEnabled()) { log.debug("[{}] [{}] Ignoring message from before the startMessageId", subscription, consumerName); } singleMessagePayload.release(); singleMessageMetadataBuilder.recycle(); ++skippedMessages; continue; } if (singleMessageMetadataBuilder.getCompactedOut()) { // message has been compacted out, so don't send to the user singleMessagePayload.release(); singleMessageMetadataBuilder.recycle(); ++skippedMessages; continue; } BatchMessageIdImpl batchMessageIdImpl = new BatchMessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), getPartitionIndex(), i, acker); final MessageImpl<T> message = new 
MessageImpl<>(topicName.toString(), batchMessageIdImpl, msgMetadata, singleMessageMetadataBuilder.build(), singleMessagePayload, createEncryptionContext(msgMetadata), cnx, schema, redeliveryCount); if (possibleToDeadLetter != null) { possibleToDeadLetter.add(message); } lock.readLock().lock(); try { if (pendingReceives.isEmpty()) { incomingMessages.add(message); } else { notifyPendingReceivedCallback(message, null); } } finally { lock.readLock().unlock(); } singleMessagePayload.release(); singleMessageMetadataBuilder.recycle(); } } catch (IOException e) { log.warn("[{}] [{}] unable to obtain message in batch", subscription, consumerName); discardCorruptedMessage(messageId, cnx, ValidationError.BatchDeSerializeError); } if (possibleToDeadLetter != null && possibleSendToDeadLetterTopicMessages != null) { possibleSendToDeadLetterTopicMessages.put(batchMessage, possibleToDeadLetter); } if (log.isDebugEnabled()) { log.debug("[{}] [{}] enqueued messages in batch. queue size - {}, available queue size - {}", subscription, consumerName, incomingMessages.size(), incomingMessages.remainingCapacity()); } if (skippedMessages > 0) { increaseAvailablePermits(cnx, skippedMessages); } } /** * Record the event that one message has been processed by the application. * * Periodically, it sends a Flow command to notify the broker that it can push more messages */ protected synchronized void messageProcessed(Message<?> msg) { ClientCnx currentCnx = cnx(); ClientCnx msgCnx = ((MessageImpl<?>) msg).getCnx(); lastDequeuedMessage = msg.getMessageId(); if (msgCnx != currentCnx) { // The processed message did belong to the old queue that was cleared after reconnection. 
return; } increaseAvailablePermits(currentCnx); stats.updateNumMsgsReceived(msg); if (conf.getAckTimeoutMillis() != 0) { // reset timer for messages that are received by the client MessageIdImpl id = (MessageIdImpl) msg.getMessageId(); if (id instanceof BatchMessageIdImpl) { id = new MessageIdImpl(id.getLedgerId(), id.getEntryId(), getPartitionIndex()); } if (partitionIndex != -1) { // we should no longer track this message, TopicsConsumer will take care from now onwards unAckedMessageTracker.remove(id); } else { unAckedMessageTracker.add(id); } } } protected void trackMessage(Message<?> msg) { if (msg != null) { MessageId messageId = msg.getMessageId(); if (conf.getAckTimeoutMillis() > 0 && messageId instanceof MessageIdImpl) { MessageIdImpl id = (MessageIdImpl)messageId; if (id instanceof BatchMessageIdImpl) { // do not add each item in batch message into tracker id = new MessageIdImpl(id.getLedgerId(), id.getEntryId(), getPartitionIndex()); } unAckedMessageTracker.add(id); } } } void increaseAvailablePermits(ClientCnx currentCnx) { increaseAvailablePermits(currentCnx, 1); } private void increaseAvailablePermits(ClientCnx currentCnx, int delta) { int available = AVAILABLE_PERMITS_UPDATER.addAndGet(this, delta); while (available >= receiverQueueRefillThreshold && !paused) { if (AVAILABLE_PERMITS_UPDATER.compareAndSet(this, available, 0)) { sendFlowPermitsToBroker(currentCnx, available); break; } else { available = AVAILABLE_PERMITS_UPDATER.get(this); } } } @Override public void pause() { paused = true; } @Override public void resume() { if (paused) { paused = false; increaseAvailablePermits(cnx(), 0); } } private ByteBuf decryptPayloadIfNeeded(MessageIdData messageId, MessageMetadata msgMetadata, ByteBuf payload, ClientCnx currentCnx) { if (msgMetadata.getEncryptionKeysCount() == 0) { return payload.retain(); } // If KeyReader is not configured throw exception based on config param if (conf.getCryptoKeyReader() == null) { switch (conf.getCryptoFailureAction()) { 
case CONSUME: log.warn("[{}][{}][{}] CryptoKeyReader interface is not implemented. Consuming encrypted message.", topic, subscription, consumerName); return payload.retain(); case DISCARD: log.warn( "[{}][{}][{}] Skipping decryption since CryptoKeyReader interface is not implemented and config is set to discard", topic, subscription, consumerName); discardMessage(messageId, currentCnx, ValidationError.DecryptionError); return null; case FAIL: MessageId m = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), partitionIndex); log.error( "[{}][{}][{}][{}] Message delivery failed since CryptoKeyReader interface is not implemented to consume encrypted message", topic, subscription, consumerName, m); unAckedMessageTracker.add(m); return null; } } ByteBuf decryptedData = this.msgCrypto.decrypt(msgMetadata, payload, conf.getCryptoKeyReader()); if (decryptedData != null) { return decryptedData; } switch (conf.getCryptoFailureAction()) { case CONSUME: // Note, batch message will fail to consume even if config is set to consume log.warn("[{}][{}][{}][{}] Decryption failed. 
Consuming encrypted message since config is set to consume.", topic, subscription, consumerName, messageId); return payload.retain(); case DISCARD: log.warn("[{}][{}][{}][{}] Discarding message since decryption failed and config is set to discard", topic, subscription, consumerName, messageId); discardMessage(messageId, currentCnx, ValidationError.DecryptionError); return null; case FAIL: MessageId m = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), partitionIndex); log.error( "[{}][{}][{}][{}] Message delivery failed since unable to decrypt incoming message", topic, subscription, consumerName, m); unAckedMessageTracker.add(m); return null; } return null; } private ByteBuf uncompressPayloadIfNeeded(MessageIdData messageId, MessageMetadata msgMetadata, ByteBuf payload, ClientCnx currentCnx) { CompressionType compressionType = msgMetadata.getCompression(); CompressionCodec codec = CompressionCodecProvider.getCompressionCodec(compressionType); int uncompressedSize = msgMetadata.getUncompressedSize(); int payloadSize = payload.readableBytes(); if (payloadSize > ClientCnx.getMaxMessageSize()) { // payload size is itself corrupted since it cannot be bigger than the MaxMessageSize log.error("[{}][{}] Got corrupted payload message size {} at {}", topic, subscription, payloadSize, messageId); discardCorruptedMessage(messageId, currentCnx, ValidationError.UncompressedSizeCorruption); return null; } try { ByteBuf uncompressedPayload = codec.decode(payload, uncompressedSize); return uncompressedPayload; } catch (IOException e) { log.error("[{}][{}] Failed to decompress message with {} at {}: {}", topic, subscription, compressionType, messageId, e.getMessage(), e); discardCorruptedMessage(messageId, currentCnx, ValidationError.DecompressionError); return null; } } private boolean verifyChecksum(ByteBuf headersAndPayload, MessageIdData messageId) { if (hasChecksum(headersAndPayload)) { int checksum = readChecksum(headersAndPayload); int computedChecksum = 
computeChecksum(headersAndPayload); if (checksum != computedChecksum) { log.error( "[{}][{}] Checksum mismatch for message at {}:{}. Received checksum: 0x{}, Computed checksum: 0x{}", topic, subscription, messageId.getLedgerId(), messageId.getEntryId(), Long.toHexString(checksum), Integer.toHexString(computedChecksum)); return false; } } return true; } private void discardCorruptedMessage(MessageIdData messageId, ClientCnx currentCnx, ValidationError validationError) { log.error("[{}][{}] Discarding corrupted message at {}:{}", topic, subscription, messageId.getLedgerId(), messageId.getEntryId()); discardMessage(messageId, currentCnx, validationError); } private void discardMessage(MessageIdData messageId, ClientCnx currentCnx, ValidationError validationError) { ByteBuf cmd = Commands.newAck(consumerId, messageId.getLedgerId(), messageId.getEntryId(), AckType.Individual, validationError, Collections.emptyMap()); currentCnx.ctx().writeAndFlush(cmd, currentCnx.ctx().voidPromise()); increaseAvailablePermits(currentCnx); stats.incrementNumReceiveFailed(); } @Override String getHandlerName() { return subscription; } @Override public boolean isConnected() { return getClientCnx() != null && (getState() == State.Ready); } int getPartitionIndex() { return partitionIndex; } @Override public int getAvailablePermits() { return AVAILABLE_PERMITS_UPDATER.get(this); } @Override public int numMessagesInQueue() { return incomingMessages.size(); } @Override public void redeliverUnacknowledgedMessages() { ClientCnx cnx = cnx(); if (isConnected() && cnx.getRemoteEndpointProtocolVersion() >= ProtocolVersion.v2.getNumber()) { int currentSize = 0; synchronized (this) { currentSize = incomingMessages.size(); incomingMessages.clear(); unAckedMessageTracker.clear(); } cnx.ctx().writeAndFlush(Commands.newRedeliverUnacknowledgedMessages(consumerId), cnx.ctx().voidPromise()); if (currentSize > 0) { increaseAvailablePermits(cnx, currentSize); } if (log.isDebugEnabled()) { log.debug("[{}] [{}] 
[{}] Redeliver unacked messages and send {} permits", subscription, topic, consumerName, currentSize); } return; } if (cnx == null || (getState() == State.Connecting)) { log.warn("[{}] Client Connection needs to be established for redelivery of unacknowledged messages", this); } else { log.warn("[{}] Reconnecting the client to redeliver the messages.", this); cnx.ctx().close(); } } @Override public void redeliverUnacknowledgedMessages(Set<MessageId> messageIds) { if (messageIds.isEmpty()) { return; } checkArgument(messageIds.stream().findFirst().get() instanceof MessageIdImpl); if (conf.getSubscriptionType() != SubscriptionType.Shared && conf.getSubscriptionType() != SubscriptionType.Key_Shared) { // We cannot redeliver single messages if subscription type is not Shared redeliverUnacknowledgedMessages(); return; } ClientCnx cnx = cnx(); if (isConnected() && cnx.getRemoteEndpointProtocolVersion() >= ProtocolVersion.v2.getNumber()) { int messagesFromQueue = removeExpiredMessagesFromQueue(messageIds); Iterable<List<MessageIdImpl>> batches = Iterables.partition( messageIds.stream() .map(messageId -> (MessageIdImpl)messageId) .collect(Collectors.toSet()), MAX_REDELIVER_UNACKNOWLEDGED); MessageIdData.Builder builder = MessageIdData.newBuilder(); batches.forEach(ids -> { List<MessageIdData> messageIdDatas = ids.stream() .filter(messageId -> !processPossibleToDLQ(messageId)) .map(messageId -> { builder.setPartition(messageId.getPartitionIndex()); builder.setLedgerId(messageId.getLedgerId()); builder.setEntryId(messageId.getEntryId()); return builder.build(); }).collect(Collectors.toList()); ByteBuf cmd = Commands.newRedeliverUnacknowledgedMessages(consumerId, messageIdDatas); cnx.ctx().writeAndFlush(cmd, cnx.ctx().voidPromise()); messageIdDatas.forEach(MessageIdData::recycle); }); if (messagesFromQueue > 0) { increaseAvailablePermits(cnx, messagesFromQueue); } builder.recycle(); if (log.isDebugEnabled()) { log.debug("[{}] [{}] [{}] Redeliver unacked messages and increase 
{} permits", subscription, topic, consumerName, messagesFromQueue); } return; } if (cnx == null || (getState() == State.Connecting)) { log.warn("[{}] Client Connection needs to be established for redelivery of unacknowledged messages", this); } else { log.warn("[{}] Reconnecting the client to redeliver the messages.", this); cnx.ctx().close(); } } private boolean processPossibleToDLQ(MessageIdImpl messageId) { List<MessageImpl<T>> deadLetterMessages = null; if (possibleSendToDeadLetterTopicMessages != null) { if (messageId instanceof BatchMessageIdImpl) { deadLetterMessages = possibleSendToDeadLetterTopicMessages.get(new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), getPartitionIndex())); } else { deadLetterMessages = possibleSendToDeadLetterTopicMessages.get(messageId); } } if (deadLetterMessages != null) { if (deadLetterProducer == null) { try { deadLetterProducer = client.newProducer(schema) .topic(this.deadLetterPolicy.getDeadLetterTopic()) .blockIfQueueFull(false) .create(); } catch (Exception e) { log.error("Create dead letter producer exception with topic: {}", deadLetterPolicy.getDeadLetterTopic(), e); } } if (deadLetterProducer != null) { try { for (MessageImpl<T> message : deadLetterMessages) { deadLetterProducer.newMessage() .value(message.getValue()) .properties(message.getProperties()) .send(); } acknowledge(messageId); return true; } catch (Exception e) { log.error("Send to dead letter topic exception with topic: {}, messageId: {}", deadLetterProducer.getTopic(), messageId, e); } } } return false; } @Override public void seek(MessageId messageId) throws PulsarClientException { try { seekAsync(messageId).get(); } catch (ExecutionException | InterruptedException e) { throw new PulsarClientException(e); } } @Override public void seek(long timestamp) throws PulsarClientException { try { seekAsync(timestamp).get(); } catch (ExecutionException | InterruptedException e) { throw new PulsarClientException(e); } } @Override public 
CompletableFuture<Void> seekAsync(long timestamp) { if (getState() == State.Closing || getState() == State.Closed) { return FutureUtil .failedFuture(new PulsarClientException.AlreadyClosedException("Consumer was already closed")); } if (!isConnected()) { return FutureUtil.failedFuture(new PulsarClientException("Not connected to broker")); } final CompletableFuture<Void> seekFuture = new CompletableFuture<>(); long requestId = client.newRequestId(); ByteBuf seek = Commands.newSeek(consumerId, requestId, timestamp); ClientCnx cnx = cnx(); log.info("[{}][{}] Seek subscription to publish time {}", topic, subscription, timestamp); cnx.sendRequestWithId(seek, requestId).thenRun(() -> { log.info("[{}][{}] Successfully reset subscription to publish time {}", topic, subscription, timestamp); acknowledgmentsGroupingTracker.flushAndClean(); lastDequeuedMessage = MessageId.earliest; incomingMessages.clear(); seekFuture.complete(null); }).exceptionally(e -> { log.error("[{}][{}] Failed to reset subscription: {}", topic, subscription, e.getCause().getMessage()); seekFuture.completeExceptionally(e.getCause()); return null; }); return seekFuture; } @Override public CompletableFuture<Void> seekAsync(MessageId messageId) { if (getState() == State.Closing || getState() == State.Closed) { return FutureUtil .failedFuture(new PulsarClientException.AlreadyClosedException("Consumer was already closed")); } if (!isConnected()) { return FutureUtil.failedFuture(new PulsarClientException("Not connected to broker")); } final CompletableFuture<Void> seekFuture = new CompletableFuture<>(); long requestId = client.newRequestId(); MessageIdImpl msgId = (MessageIdImpl) messageId; ByteBuf seek = Commands.newSeek(consumerId, requestId, msgId.getLedgerId(), msgId.getEntryId()); ClientCnx cnx = cnx(); log.info("[{}][{}] Seek subscription to message id {}", topic, subscription, messageId); cnx.sendRequestWithId(seek, requestId).thenRun(() -> { log.info("[{}][{}] Successfully reset subscription to 
message id {}", topic, subscription, messageId); acknowledgmentsGroupingTracker.flushAndClean(); lastDequeuedMessage = messageId; incomingMessages.clear(); seekFuture.complete(null); }).exceptionally(e -> { log.error("[{}][{}] Failed to reset subscription: {}", topic, subscription, e.getCause().getMessage()); seekFuture.completeExceptionally(e.getCause()); return null; }); return seekFuture; } public boolean hasMessageAvailable() throws PulsarClientException { try { if (hasMoreMessages(lastMessageIdInBroker, lastDequeuedMessage)) { return true; } return hasMessageAvailableAsync().get(); } catch (ExecutionException | InterruptedException e) { throw new PulsarClientException(e); } } public CompletableFuture<Boolean> hasMessageAvailableAsync() { final CompletableFuture<Boolean> booleanFuture = new CompletableFuture<>(); if (hasMoreMessages(lastMessageIdInBroker, lastDequeuedMessage)) { booleanFuture.complete(true); } else { getLastMessageIdAsync().thenAccept(messageId -> { lastMessageIdInBroker = messageId; if (hasMoreMessages(lastMessageIdInBroker, lastDequeuedMessage)) { booleanFuture.complete(true); } else { booleanFuture.complete(false); } }).exceptionally(e -> { log.error("[{}][{}] Failed getLastMessageId command", topic, subscription); booleanFuture.completeExceptionally(e.getCause()); return null; }); } return booleanFuture; } private boolean hasMoreMessages(MessageId lastMessageIdInBroker, MessageId lastDequeuedMessage) { if (lastMessageIdInBroker.compareTo(lastDequeuedMessage) > 0 && ((MessageIdImpl)lastMessageIdInBroker).getEntryId() != -1) { return true; } else { // Make sure batching message can be read completely. 
return lastMessageIdInBroker.compareTo(lastDequeuedMessage) == 0 && incomingMessages.size() > 0; } } CompletableFuture<MessageId> getLastMessageIdAsync() { if (getState() == State.Closing || getState() == State.Closed) { return FutureUtil .failedFuture(new PulsarClientException.AlreadyClosedException("Consumer was already closed")); } AtomicLong opTimeoutMs = new AtomicLong(client.getConfiguration().getOperationTimeoutMs()); Backoff backoff = new BackoffBuilder() .setInitialTime(100, TimeUnit.MILLISECONDS) .setMax(opTimeoutMs.get() * 2, TimeUnit.MILLISECONDS) .setMandatoryStop(0, TimeUnit.MILLISECONDS) .useUserConfiguredIntervals(backoffIntervalNanos, maxBackoffIntervalNanos) .create(); CompletableFuture<MessageId> getLastMessageIdFuture = new CompletableFuture<>(); internalGetLastMessageIdAsync(backoff, opTimeoutMs, getLastMessageIdFuture); return getLastMessageIdFuture; } private void internalGetLastMessageIdAsync(final Backoff backoff, final AtomicLong remainingTime, CompletableFuture<MessageId> future) { ClientCnx cnx = cnx(); if (isConnected() && cnx != null) { if (!Commands.peerSupportsGetLastMessageId(cnx.getRemoteEndpointProtocolVersion())) { future.completeExceptionally(new PulsarClientException .NotSupportedException("GetLastMessageId Not supported for ProtocolVersion: " + cnx.getRemoteEndpointProtocolVersion())); } long requestId = client.newRequestId(); ByteBuf getLastIdCmd = Commands.newGetLastMessageId(consumerId, requestId); log.info("[{}][{}] Get topic last message Id", topic, subscription); cnx.sendGetLastMessageId(getLastIdCmd, requestId).thenAccept((result) -> { log.info("[{}][{}] Successfully getLastMessageId {}:{}", topic, subscription, result.getLedgerId(), result.getEntryId()); future.complete(new MessageIdImpl(result.getLedgerId(), result.getEntryId(), result.getPartition())); }).exceptionally(e -> { log.error("[{}][{}] Failed getLastMessageId command", topic, subscription); future.completeExceptionally(e.getCause()); return null; }); } else 
{ long nextDelay = Math.min(backoff.next(), remainingTime.get()); if (nextDelay <= 0) { future.completeExceptionally(new PulsarClientException .TimeoutException("Could not getLastMessageId within configured timeout.")); return; } ((ScheduledExecutorService) listenerExecutor).schedule(() -> { log.warn("[{}] [{}] Could not get connection while getLastMessageId -- Will try again in {} ms", topic, getHandlerName(), nextDelay); remainingTime.addAndGet(-nextDelay); internalGetLastMessageIdAsync(backoff, remainingTime, future); }, nextDelay, TimeUnit.MILLISECONDS); } } private MessageIdImpl getMessageIdImpl(Message<?> msg) { MessageIdImpl messageId = (MessageIdImpl) msg.getMessageId(); if (messageId instanceof BatchMessageIdImpl) { // messageIds contain MessageIdImpl, not BatchMessageIdImpl messageId = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), getPartitionIndex()); } return messageId; } private boolean isMessageUndecryptable(MessageMetadata msgMetadata) { return (msgMetadata.getEncryptionKeysCount() > 0 && conf.getCryptoKeyReader() == null && conf.getCryptoFailureAction() == ConsumerCryptoFailureAction.CONSUME); } /** * Create EncryptionContext if message payload is encrypted * * @param msgMetadata * @return {@link Optional}<{@link EncryptionContext}> */ private Optional<EncryptionContext> createEncryptionContext(MessageMetadata msgMetadata) { EncryptionContext encryptionCtx = null; if (msgMetadata.getEncryptionKeysCount() > 0) { encryptionCtx = new EncryptionContext(); Map<String, EncryptionKey> keys = msgMetadata.getEncryptionKeysList().stream() .collect( Collectors.toMap(EncryptionKeys::getKey, e -> new EncryptionKey(e.getValue().toByteArray(), e.getMetadataList() != null ? 
e.getMetadataList().stream().collect( Collectors.toMap(KeyValue::getKey, KeyValue::getValue)) : null))); byte[] encParam = new byte[MessageCrypto.ivLen]; msgMetadata.getEncryptionParam().copyTo(encParam, 0); Optional<Integer> batchSize = Optional .ofNullable(msgMetadata.hasNumMessagesInBatch() ? msgMetadata.getNumMessagesInBatch() : null); encryptionCtx.setKeys(keys); encryptionCtx.setParam(encParam); encryptionCtx.setAlgorithm(msgMetadata.getEncryptionAlgo()); encryptionCtx .setCompressionType(CompressionCodecProvider.convertFromWireProtocol(msgMetadata.getCompression())); encryptionCtx.setUncompressedMessageSize(msgMetadata.getUncompressedSize()); encryptionCtx.setBatchSize(batchSize); } return Optional.ofNullable(encryptionCtx); } private int removeExpiredMessagesFromQueue(Set<MessageId> messageIds) { int messagesFromQueue = 0; Message<T> peek = incomingMessages.peek(); if (peek != null) { MessageIdImpl messageId = getMessageIdImpl(peek); if (!messageIds.contains(messageId)) { // first message is not expired, then no message is expired in queue. 
return 0; } // try not to remove elements that are added while we remove Message<T> message = incomingMessages.poll(); while (message != null) { messagesFromQueue++; MessageIdImpl id = getMessageIdImpl(message); if (!messageIds.contains(id)) { messageIds.add(id); break; } message = incomingMessages.poll(); } } return messagesFromQueue; } @Override public ConsumerStats getStats() { return stats; } void setTerminated() { log.info("[{}] [{}] [{}] Consumer has reached the end of topic", subscription, topic, consumerName); hasReachedEndOfTopic = true; if (listener != null) { // Propagate notification to listener listener.reachedEndOfTopic(this); } } @Override public boolean hasReachedEndOfTopic() { return hasReachedEndOfTopic; } @Override public int hashCode() { return Objects.hash(topic, subscription, consumerName); } // wrapper for connection methods ClientCnx cnx() { return this.connectionHandler.cnx(); } void resetBackoff() { this.connectionHandler.resetBackoff(); } void connectionClosed(ClientCnx cnx) { this.connectionHandler.connectionClosed(cnx); } @VisibleForTesting public ClientCnx getClientCnx() { return this.connectionHandler.getClientCnx(); } void setClientCnx(ClientCnx clientCnx) { this.connectionHandler.setClientCnx(clientCnx); } void reconnectLater(Throwable exception) { this.connectionHandler.reconnectLater(exception); } void grabCnx() { this.connectionHandler.grabCnx(); } public String getTopicNameWithoutPartition() { return topicNameWithoutPartition; } private static final Logger log = LoggerFactory.getLogger(ConsumerImpl.class); }
pulsar-client/src/main/java/org/apache/pulsar/client/impl/ConsumerImpl.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.client.impl; import static com.google.common.base.Preconditions.checkArgument; import static com.scurrilous.circe.checksum.Crc32cIntChecksum.computeChecksum; import static org.apache.pulsar.common.api.Commands.hasChecksum; import static org.apache.pulsar.common.api.Commands.readChecksum; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Iterables; import io.netty.buffer.ByteBuf; import io.netty.util.Timeout; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import 
java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.pulsar.client.api.Consumer; import org.apache.pulsar.client.api.ConsumerCryptoFailureAction; import org.apache.pulsar.client.api.ConsumerStats; import org.apache.pulsar.client.api.DeadLetterPolicy; import org.apache.pulsar.client.api.Message; import org.apache.pulsar.client.api.MessageId; import org.apache.pulsar.client.api.Producer; import org.apache.pulsar.client.api.PulsarClientException; import org.apache.pulsar.client.api.Schema; import org.apache.pulsar.client.api.SubscriptionInitialPosition; import org.apache.pulsar.client.api.SubscriptionType; import org.apache.pulsar.client.impl.conf.ConsumerConfigurationData; import org.apache.pulsar.common.api.Commands; import org.apache.pulsar.common.api.EncryptionContext; import org.apache.pulsar.common.api.EncryptionContext.EncryptionKey; import org.apache.pulsar.common.api.proto.PulsarApi; import org.apache.pulsar.common.api.proto.PulsarApi.CommandAck.AckType; import org.apache.pulsar.common.api.proto.PulsarApi.CommandAck.ValidationError; import org.apache.pulsar.common.api.proto.PulsarApi.CommandSubscribe.InitialPosition; import org.apache.pulsar.common.api.proto.PulsarApi.CompressionType; import org.apache.pulsar.common.api.proto.PulsarApi.EncryptionKeys; import org.apache.pulsar.common.api.proto.PulsarApi.KeyValue; import org.apache.pulsar.common.api.proto.PulsarApi.MessageIdData; import org.apache.pulsar.common.api.proto.PulsarApi.MessageMetadata; import org.apache.pulsar.common.api.proto.PulsarApi.ProtocolVersion; import org.apache.pulsar.common.compression.CompressionCodec; import org.apache.pulsar.common.compression.CompressionCodecProvider; import org.apache.pulsar.common.naming.TopicName; import org.apache.pulsar.common.schema.SchemaInfo; import org.apache.pulsar.common.schema.SchemaType; import org.apache.pulsar.common.util.FutureUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class 
ConsumerImpl<T> extends ConsumerBase<T> implements ConnectionHandler.Connection {

    // Upper bound on the number of message ids packed into a single redeliver command.
    private static final int MAX_REDELIVER_UNACKNOWLEDGED = 1000;

    final long consumerId;

    // Number of messages that have delivered to the application. Every once in a while, this number will be sent to the
    // broker to notify that we are ready to get (and store in the incoming messages queue) more messages
    @SuppressWarnings("rawtypes")
    private static final AtomicIntegerFieldUpdater<ConsumerImpl> AVAILABLE_PERMITS_UPDATER = AtomicIntegerFieldUpdater
            .newUpdater(ConsumerImpl.class, "availablePermits");
    @SuppressWarnings("unused")
    private volatile int availablePermits = 0;

    // Last message handed to the application, and the broker's last known message id (cached for
    // hasMessageAvailable checks).
    protected volatile MessageId lastDequeuedMessage = MessageId.earliest;
    private volatile MessageId lastMessageIdInBroker = MessageId.earliest;

    private long subscribeTimeout;
    private final int partitionIndex;

    // Half the receiver queue size; permits are flushed to the broker once this many accumulate.
    private final int receiverQueueRefillThreshold;

    // Guards the incomingMessages/pendingReceives hand-off between producer-side (read lock) and
    // receive-side (write lock) paths.
    private final ReadWriteLock lock = new ReentrantReadWriteLock();

    private final UnAckedMessageTracker unAckedMessageTracker;
    private final AcknowledgmentsGroupingTracker acknowledgmentsGroupingTracker;
    private final NegativeAcksTracker negativeAcksTracker;

    protected final ConsumerStatsRecorder stats;
    private final int priorityLevel;
    private final SubscriptionMode subscriptionMode;
    private volatile BatchMessageIdImpl startMessageId;
    private volatile boolean hasReachedEndOfTopic;

    private final MessageCrypto msgCrypto;

    private final Map<String, String> metadata;

    private final boolean readCompacted;

    private final SubscriptionInitialPosition subscriptionInitialPosition;
    private final ConnectionHandler connectionHandler;

    private final TopicName topicName;
    private final String topicNameWithoutPartition;

    // Batch entries that exhausted the max redeliver count, staged for the dead-letter topic.
    private final Map<MessageIdImpl, List<MessageImpl<T>>> possibleSendToDeadLetterTopicMessages;

    private final DeadLetterPolicy deadLetterPolicy;

    private Producer<T> deadLetterProducer;

    private final long backoffIntervalNanos;
    private final long maxBackoffIntervalNanos;

    protected volatile boolean paused;

    enum SubscriptionMode {
        // Make the subscription to be backed by a durable cursor that will retain messages and persist the current
        // position
        Durable,

        // Lightweight subscription mode that doesn't have a durable cursor associated
        NonDurable
    }

    // Factory with default backoff intervals; delegates to the fully-parameterized overload below.
    static <T> ConsumerImpl<T> newConsumerImpl(PulsarClientImpl client, String topic,
            ConsumerConfigurationData<T> conf, ExecutorService listenerExecutor, int partitionIndex,
            CompletableFuture<Consumer<T>> subscribeFuture, SubscriptionMode subscriptionMode,
            MessageId startMessageId, Schema<T> schema, ConsumerInterceptors<T> interceptors) {
        return ConsumerImpl.newConsumerImpl(client, topic, conf, listenerExecutor, partitionIndex, subscribeFuture,
                subscriptionMode, startMessageId, schema, interceptors,
                Backoff.DEFAULT_INTERVAL_IN_NANOSECONDS, Backoff.MAX_BACKOFF_INTERVAL_NANOSECONDS);
    }

    // Chooses the zero-queue variant when the receiver queue size is 0, otherwise a regular consumer.
    static <T> ConsumerImpl<T> newConsumerImpl(PulsarClientImpl client, String topic,
            ConsumerConfigurationData<T> conf, ExecutorService listenerExecutor, int partitionIndex,
            CompletableFuture<Consumer<T>> subscribeFuture, SubscriptionMode subscriptionMode,
            MessageId startMessageId, Schema<T> schema, ConsumerInterceptors<T> interceptors,
            long backoffIntervalNanos, long maxBackoffIntervalNanos) {
        if (conf.getReceiverQueueSize() == 0) {
            return new ZeroQueueConsumerImpl<>(client, topic, conf, listenerExecutor, partitionIndex, subscribeFuture,
                    subscriptionMode, startMessageId, schema, interceptors,
                    backoffIntervalNanos, maxBackoffIntervalNanos);
        } else {
            return new ConsumerImpl<>(client, topic, conf, listenerExecutor, partitionIndex, subscribeFuture,
                    subscriptionMode, startMessageId, schema, interceptors,
                    backoffIntervalNanos, maxBackoffIntervalNanos);
        }
    }

    // Wires up trackers, crypto, backoff/connection handling and DLQ policy, then initiates the
    // connection (grabCnx) as the final step.
    protected ConsumerImpl(PulsarClientImpl client, String topic, ConsumerConfigurationData<T> conf,
            ExecutorService listenerExecutor, int partitionIndex, CompletableFuture<Consumer<T>> subscribeFuture,
            SubscriptionMode subscriptionMode, MessageId startMessageId, Schema<T> schema,
            ConsumerInterceptors<T> interceptors, long backoffIntervalNanos, long maxBackoffIntervalNanos) {
        super(client, topic, conf, conf.getReceiverQueueSize(), listenerExecutor, subscribeFuture, schema,
                interceptors);
        this.consumerId = client.newConsumerId();
        this.subscriptionMode = subscriptionMode;
        this.startMessageId = startMessageId != null ? new BatchMessageIdImpl((MessageIdImpl) startMessageId) : null;
        AVAILABLE_PERMITS_UPDATER.set(this, 0);
        this.subscribeTimeout = System.currentTimeMillis() + client.getConfiguration().getOperationTimeoutMs();
        this.partitionIndex = partitionIndex;
        this.receiverQueueRefillThreshold = conf.getReceiverQueueSize() / 2;
        this.priorityLevel = conf.getPriorityLevel();
        this.readCompacted = conf.isReadCompacted();
        this.subscriptionInitialPosition = conf.getSubscriptionInitialPosition();
        this.negativeAcksTracker = new NegativeAcksTracker(this, conf);

        if (client.getConfiguration().getStatsIntervalSeconds() > 0) {
            stats = new ConsumerStatsRecorderImpl(client, conf, this);
        } else {
            stats = ConsumerStatsDisabled.INSTANCE;
        }

        if (conf.getAckTimeoutMillis() != 0) {
            if (conf.getTickDurationMillis() > 0) {
                this.unAckedMessageTracker = new UnAckedMessageTracker(client, this, conf.getAckTimeoutMillis(),
                        Math.min(conf.getTickDurationMillis(), conf.getAckTimeoutMillis()));
            } else {
                this.unAckedMessageTracker = new UnAckedMessageTracker(client, this, conf.getAckTimeoutMillis());
            }
        } else {
            this.unAckedMessageTracker = UnAckedMessageTracker.UNACKED_MESSAGE_TRACKER_DISABLED;
        }

        // Create msgCrypto if not created already
        if (conf.getCryptoKeyReader() != null) {
            this.msgCrypto = new MessageCrypto(String.format("[%s] [%s]", topic, subscription), false);
        } else {
            this.msgCrypto = null;
        }

        if (conf.getProperties().isEmpty()) {
            metadata = Collections.emptyMap();
        } else {
            metadata = Collections.unmodifiableMap(new HashMap<>(conf.getProperties()));
        }

        this.connectionHandler = new ConnectionHandler(this,
                new BackoffBuilder()
                        .setInitialTime(100, TimeUnit.MILLISECONDS)
                        .setMax(60, TimeUnit.SECONDS)
                        .setMandatoryStop(0, TimeUnit.MILLISECONDS)
                        .useUserConfiguredIntervals(backoffIntervalNanos, maxBackoffIntervalNanos)
                        .create(),
                this);

        this.topicName = TopicName.get(topic);
        if (this.topicName.isPersistent()) {
            this.acknowledgmentsGroupingTracker =
                new PersistentAcknowledgmentsGroupingTracker(this, conf, client.eventLoopGroup());
        } else {
            // Ack grouping is a no-op for non-persistent topics.
            this.acknowledgmentsGroupingTracker =
                NonPersistentAcknowledgmentGroupingTracker.of();
        }

        if (conf.getDeadLetterPolicy() != null) {
            possibleSendToDeadLetterTopicMessages = new ConcurrentHashMap<>();
            if (StringUtils.isNotBlank(conf.getDeadLetterPolicy().getDeadLetterTopic())) {
                this.deadLetterPolicy = DeadLetterPolicy.builder()
                        .maxRedeliverCount(conf.getDeadLetterPolicy().getMaxRedeliverCount())
                        .deadLetterTopic(conf.getDeadLetterPolicy().getDeadLetterTopic())
                        .build();
            } else {
                // Default DLQ topic name: "<topic>-<subscription>-DLQ".
                this.deadLetterPolicy = DeadLetterPolicy.builder()
                        .maxRedeliverCount(conf.getDeadLetterPolicy().getMaxRedeliverCount())
                        .deadLetterTopic(String.format("%s-%s-DLQ", topic, subscription))
                        .build();
            }
        } else {
            deadLetterPolicy = null;
            possibleSendToDeadLetterTopicMessages = null;
        }

        this.backoffIntervalNanos = backoffIntervalNanos;
        this.maxBackoffIntervalNanos = maxBackoffIntervalNanos;

        topicNameWithoutPartition = topicName.getPartitionedTopicName();

        grabCnx();
    }

    public ConnectionHandler getConnectionHandler() {
        return connectionHandler;
    }

    public UnAckedMessageTracker getUnAckedMessageTracker() {
        return unAckedMessageTracker;
    }

    // Sends an Unsubscribe command and, on success, tears down local state and closes the consumer.
    // On failure the consumer transitions back to Ready.
    @Override
    public CompletableFuture<Void> unsubscribeAsync() {
        if (getState() == State.Closing || getState() == State.Closed) {
            return FutureUtil
                    .failedFuture(new PulsarClientException.AlreadyClosedException("Consumer was already closed"));
        }
        final CompletableFuture<Void> unsubscribeFuture = new CompletableFuture<>();
        if (isConnected()) {
            setState(State.Closing);
            long requestId = client.newRequestId();
            ByteBuf unsubscribe = Commands.newUnsubscribe(consumerId, requestId);
            ClientCnx cnx = cnx();
            cnx.sendRequestWithId(unsubscribe, requestId).thenRun(() -> {
                cnx.removeConsumer(consumerId);
                unAckedMessageTracker.close();
                if (possibleSendToDeadLetterTopicMessages != null) {
                    possibleSendToDeadLetterTopicMessages.clear();
                }
                client.cleanupConsumer(ConsumerImpl.this);
                log.info("[{}][{}] Successfully unsubscribed from topic", topic, subscription);
                setState(State.Closed);
                unsubscribeFuture.complete(null);
            }).exceptionally(e -> {
                log.error("[{}][{}] Failed to unsubscribe: {}", topic, subscription, e.getCause().getMessage());
                setState(State.Ready);
                unsubscribeFuture.completeExceptionally(e.getCause());
                return null;
            });
        } else {
            unsubscribeFuture.completeExceptionally(new PulsarClientException("Not connected to broker"));
        }
        return unsubscribeFuture;
    }

    // Blocking receive: waits for the next message, runs interceptors and updates permits/stats.
    @Override
    protected Message<T> internalReceive() throws PulsarClientException {
        Message<T> message;
        try {
            message = incomingMessages.take();
            trackMessage(message);
            Message<T> interceptMsg = beforeConsume(message);
            messageProcessed(interceptMsg);
            return interceptMsg;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            stats.incrementNumReceiveFailed();
            throw new PulsarClientException(e);
        }
    }

    // Async receive: completes immediately if a message is queued, otherwise registers a pending
    // future that notifyPendingReceivedCallback() will complete. The write lock keeps the
    // poll-or-enqueue decision atomic with respect to the arrival path (which holds the read lock).
    @Override
    protected CompletableFuture<Message<T>> internalReceiveAsync() {

        CompletableFuture<Message<T>> result = new CompletableFuture<>();
        Message<T> message = null;
        try {
            lock.writeLock().lock();
            message = incomingMessages.poll(0, TimeUnit.MILLISECONDS);
            if (message == null) {
                pendingReceives.add(result);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            result.completeExceptionally(e);
        } finally {
            lock.writeLock().unlock();
        }

        if (message != null) {
            trackMessage(message);
            Message<T> interceptMsg = beforeConsume(message);
            messageProcessed(interceptMsg);
            result.complete(interceptMsg);
        }

        return result;
    }

    // Timed receive (continues beyond this chunk).
    @Override
    protected Message<T> internalReceive(int timeout, TimeUnit unit) throws PulsarClientException {
        Message<T> message;
        try {
            message = incomingMessages.poll(timeout, unit);
            trackMessage(message);
            Message<T> interceptMsg = beforeConsume(message);
            if (interceptMsg != null) {
                messageProcessed(interceptMsg);
            }
            return interceptMsg;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            State state = getState();
            if (state != State.Closing && state != State.Closed) {
                stats.incrementNumReceiveFailed();
                throw new PulsarClientException(e);
            } else {
                // Interrupted as part of shutdown: report "no message" instead of failing
                return null;
            }
        }
    }

    /**
     * Records an individual/cumulative ack against a batch message id.
     *
     * @return true when every message of the batch is now acked, i.e. the whole entry
     *         can be acknowledged to the broker via sendAcknowledge()
     */
    boolean markAckForBatchMessage(BatchMessageIdImpl batchMessageId, AckType ackType,
            Map<String,Long> properties) {
        boolean isAllMsgsAcked;
        if (ackType == AckType.Individual) {
            isAllMsgsAcked = batchMessageId.ackIndividual();
        } else {
            isAllMsgsAcked = batchMessageId.ackCumulative();
        }
        int outstandingAcks = 0;
        if (log.isDebugEnabled()) {
            outstandingAcks = batchMessageId.getOutstandingAcksInSameBatch();
        }

        int batchSize = batchMessageId.getBatchSize();
        // all messages in this batch have been acked
        if (isAllMsgsAcked) {
            if (log.isDebugEnabled()) {
                log.debug("[{}] [{}] can ack message to broker {}, acktype {}, cardinality {}, length {}",
                        subscription, consumerName, batchMessageId, ackType, outstandingAcks, batchSize);
            }
            return true;
        } else {
            if (AckType.Cumulative == ackType
                    && !batchMessageId.getAcker().isPrevBatchCumulativelyAcked()) {
                // Cumulative ack inside a partially-acked batch: cumulatively ack up to the
                // previous batch once, then remember that it was done
                sendAcknowledge(batchMessageId.prevBatchMessageId(), AckType.Cumulative, properties);
                batchMessageId.getAcker().setPrevBatchCumulativelyAcked(true);
            } else {
                onAcknowledge(batchMessageId, null);
            }
            if (log.isDebugEnabled()) {
                log.debug("[{}] [{}] cannot ack message to broker {}, acktype {}, pending acks - {}",
                        subscription, consumerName, batchMessageId, ackType, outstandingAcks);
            }
        }
        return false;
    }

    @Override
    protected CompletableFuture<Void> doAcknowledge(MessageId messageId, AckType ackType,
            Map<String,Long> properties) {
        checkArgument(messageId instanceof MessageIdImpl);
        if (getState() != State.Ready && getState() != State.Connecting) {
            stats.incrementNumAcksFailed();
            PulsarClientException exception = new PulsarClientException("Consumer not ready. State: " + getState());
            if (AckType.Individual.equals(ackType)) {
                onAcknowledge(messageId, exception);
            } else if (AckType.Cumulative.equals(ackType)) {
                onAcknowledgeCumulative(messageId, exception);
            }
            return FutureUtil.failedFuture(exception);
        }

        if (messageId instanceof BatchMessageIdImpl) {
            if (markAckForBatchMessage((BatchMessageIdImpl) messageId, ackType, properties)) {
                // all messages in batch have been acked so broker can be acked via sendAcknowledge()
                if (log.isDebugEnabled()) {
                    log.debug("[{}] [{}] acknowledging message - {}, acktype {}", subscription, consumerName,
                            messageId, ackType);
                }
            } else {
                // other messages in batch are still pending ack.
                return CompletableFuture.completedFuture(null);
            }
        }
        return sendAcknowledge(messageId, ackType, properties);
    }

    // Updates trackers/stats and hands the ack over to the grouping tracker.
    // Completes immediately: acks are best-effort (see comment at the bottom).
    private CompletableFuture<Void> sendAcknowledge(MessageId messageId, AckType ackType,
            Map<String,Long> properties) {
        MessageIdImpl msgId = (MessageIdImpl) messageId;

        if (ackType == AckType.Individual) {
            if (messageId instanceof BatchMessageIdImpl) {
                BatchMessageIdImpl batchMessageId = (BatchMessageIdImpl) messageId;

                stats.incrementNumAcksSent(batchMessageId.getBatchSize());
                unAckedMessageTracker.remove(new MessageIdImpl(batchMessageId.getLedgerId(),
                        batchMessageId.getEntryId(), batchMessageId.getPartitionIndex()));
                if (possibleSendToDeadLetterTopicMessages != null) {
                    possibleSendToDeadLetterTopicMessages.remove(new MessageIdImpl(batchMessageId.getLedgerId(),
                            batchMessageId.getEntryId(), batchMessageId.getPartitionIndex()));
                }
            } else {
                // increment counter by 1 for non-batch msg
                unAckedMessageTracker.remove(msgId);
                if (possibleSendToDeadLetterTopicMessages != null) {
                    possibleSendToDeadLetterTopicMessages.remove(msgId);
                }
                stats.incrementNumAcksSent(1);
            }
            onAcknowledge(messageId, null);
        } else if (ackType == AckType.Cumulative) {
            onAcknowledgeCumulative(messageId, null);
            stats.incrementNumAcksSent(unAckedMessageTracker.removeMessagesTill(msgId));
        }

        acknowledgmentsGroupingTracker.addAcknowledgment(msgId, ackType, properties);

        // Consumer acknowledgment operation immediately succeeds. In any case, if we're not able to send ack to broker,
        // the messages will be re-delivered
        return CompletableFuture.completedFuture(null);
    }

    @Override
    public void negativeAcknowledge(MessageId messageId) {
        negativeAcksTracker.add(messageId);

        // Ensure the message is not redelivered for ack-timeout, since we did receive an "ack"
        unAckedMessageTracker.remove(messageId);
    }

    // Called by the connection handler once a (re)connection to the broker is established;
    // re-issues the Subscribe command for this consumer.
    @Override
    public void connectionOpened(final ClientCnx cnx) {
        setClientCnx(cnx);
        cnx.registerConsumer(consumerId, this);

        log.info("[{}][{}] Subscribing to topic on cnx {}", topic, subscription, cnx.ctx().channel());

        long requestId = client.newRequestId();

        int currentSize;
        synchronized (this) {
            currentSize = incomingMessages.size();
            // Drop locally-buffered messages and compute the message id to resume from
            startMessageId = clearReceiverQueue();
            if (possibleSendToDeadLetterTopicMessages != null) {
                possibleSendToDeadLetterTopicMessages.clear();
            }
        }

        boolean isDurable = subscriptionMode == SubscriptionMode.Durable;
        MessageIdData startMessageIdData;
        if (isDurable) {
            // For regular durable subscriptions, the message id from where to restart will be determined by the broker.
            startMessageIdData = null;
        } else {
            // For non-durable we are going to restart from the next entry
            MessageIdData.Builder builder = MessageIdData.newBuilder();
            builder.setLedgerId(startMessageId.getLedgerId());
            builder.setEntryId(startMessageId.getEntryId());
            if (startMessageId instanceof BatchMessageIdImpl) {
                builder.setBatchIndex(((BatchMessageIdImpl) startMessageId).getBatchIndex());
            }

            startMessageIdData = builder.build();
            builder.recycle();
        }

        SchemaInfo si = schema.getSchemaInfo();
        if (si != null && (SchemaType.BYTES == si.getType() || SchemaType.NONE == si.getType())) {
            // don't set schema for Schema.BYTES
            si = null;
        }
        ByteBuf request = Commands.newSubscribe(topic, subscription, consumerId, requestId, getSubType(),
                priorityLevel, consumerName, isDurable, startMessageIdData, metadata, readCompacted,
                conf.isReplicateSubscriptionState(),
                InitialPosition.valueOf(subscriptionInitialPosition.getValue()), si);
        if (startMessageIdData != null) {
            startMessageIdData.recycle();
        }

        cnx.sendRequestWithId(request, requestId).thenRun(() -> {
            synchronized (ConsumerImpl.this) {
                if (changeToReadyState()) {
                    consumerIsReconnectedToBroker(cnx, currentSize);
                } else {
                    // Consumer was closed while reconnecting, close the connection to make sure the broker
                    // drops the consumer on its side
                    setState(State.Closed);
                    cnx.removeConsumer(consumerId);
                    cnx.channel().close();
                    return;
                }
            }

            resetBackoff();

            boolean firstTimeConnect = subscribeFuture.complete(this);
            // if the consumer is not partitioned or is re-connected and is partitioned, we send the flow
            // command to receive messages.
            // For readers too (isDurable==false), the partition idx will be set though we have to
            // send available permits immediately after establishing the reader session
            if (!(firstTimeConnect && partitionIndex > -1 && isDurable) && conf.getReceiverQueueSize() != 0) {
                sendFlowPermitsToBroker(cnx, conf.getReceiverQueueSize());
            }
        }).exceptionally((e) -> {
            cnx.removeConsumer(consumerId);
            if (getState() == State.Closing || getState() == State.Closed) {
                // Consumer was closed while reconnecting, close the connection to make sure the broker
                // drops the consumer on its side
                cnx.channel().close();
                return null;
            }
            log.warn("[{}][{}] Failed to subscribe to topic on {}", topic, subscription,
                    cnx.channel().remoteAddress());

            // Retry while the error is retriable and we are still inside the operation timeout window
            if (e.getCause() instanceof PulsarClientException
                    && getConnectionHandler().isRetriableError((PulsarClientException) e.getCause())
                    && System.currentTimeMillis() < subscribeTimeout) {
                reconnectLater(e.getCause());
                return null;
            }

            if (!subscribeFuture.isDone()) {
                // unable to create new consumer, fail operation
                setState(State.Failed);
                subscribeFuture.completeExceptionally(e);
                client.cleanupConsumer(this);
            } else {
                // consumer was subscribed and connected but we got some error, keep trying
                reconnectLater(e.getCause());
            }
            return null;
        });
    }

    protected void consumerIsReconnectedToBroker(ClientCnx cnx, int currentQueueSize) {
        log.info("[{}][{}] Subscribed to topic on {} -- consumer: {}", topic, subscription,
                cnx.channel().remoteAddress(), consumerId);

        // Permits restart from zero after a reconnect; flow will be re-primed by connectionOpened()
        AVAILABLE_PERMITS_UPDATER.set(this, 0);
    }

    /**
     * Clear the internal receiver queue and returns the message id of what was the 1st message in the queue that was
     * not seen by the application
     */
    private BatchMessageIdImpl clearReceiverQueue() {
        List<Message<?>> currentMessageQueue = new ArrayList<>(incomingMessages.size());
        incomingMessages.drainTo(currentMessageQueue);
        if (!currentMessageQueue.isEmpty()) {
            MessageIdImpl nextMessageInQueue = (MessageIdImpl) currentMessageQueue.get(0).getMessageId();
            BatchMessageIdImpl previousMessage;
            if (nextMessageInQueue instanceof BatchMessageIdImpl) {
                // Get on the previous message within the current batch
                previousMessage = new BatchMessageIdImpl(nextMessageInQueue.getLedgerId(),
                        nextMessageInQueue.getEntryId(), nextMessageInQueue.getPartitionIndex(),
                        ((BatchMessageIdImpl) nextMessageInQueue).getBatchIndex() - 1);
            } else {
                // Get on previous message in previous entry
                previousMessage = new BatchMessageIdImpl(nextMessageInQueue.getLedgerId(),
                        nextMessageInQueue.getEntryId() - 1, nextMessageInQueue.getPartitionIndex(), -1);
            }

            return previousMessage;
        } else if (!lastDequeuedMessage.equals(MessageId.earliest)) {
            // If the queue was empty we need to restart from the message just after the last one that has been dequeued
            // in the past
            return new BatchMessageIdImpl((MessageIdImpl) lastDequeuedMessage);
        } else {
            // No message was received or dequeued by this consumer. Next message would still be the startMessageId
            return startMessageId;
        }
    }

    /**
     * send the flow command to have the broker start pushing messages
     */
    void sendFlowPermitsToBroker(ClientCnx cnx, int numMessages) {
        if (cnx != null) {
            if (log.isDebugEnabled()) {
                log.debug("[{}] [{}] Adding {} additional permits", topic, subscription, numMessages);
            }

            cnx.ctx().writeAndFlush(Commands.newFlow(consumerId, numMessages), cnx.ctx().voidPromise());
        }
    }

    @Override
    public void connectionFailed(PulsarClientException exception) {
        // Only fail the subscribe future once the overall operation timeout has elapsed;
        // earlier failures are retried by the connection handler
        if (System.currentTimeMillis() > subscribeTimeout && subscribeFuture.completeExceptionally(exception)) {
            setState(State.Failed);
            log.info("[{}] Consumer creation failed for consumer {}", topic, consumerId);
            client.cleanupConsumer(this);
        }
    }

    @Override
    public CompletableFuture<Void> closeAsync() {
        if (getState() == State.Closing || getState() == State.Closed) {
            unAckedMessageTracker.close();
            if (possibleSendToDeadLetterTopicMessages != null) {
                possibleSendToDeadLetterTopicMessages.clear();
            }
            return CompletableFuture.completedFuture(null);
        }

        if (!isConnected()) {
            // No broker connection: just transition to Closed locally
            log.info("[{}] [{}] Closed Consumer (not connected)", topic, subscription);
            setState(State.Closed);
            unAckedMessageTracker.close();
            if (possibleSendToDeadLetterTopicMessages != null) {
                possibleSendToDeadLetterTopicMessages.clear();
            }
            client.cleanupConsumer(this);
            return CompletableFuture.completedFuture(null);
        }

        stats.getStatTimeout().ifPresent(Timeout::cancel);

        setState(State.Closing);

        acknowledgmentsGroupingTracker.close();

        long requestId = client.newRequestId();

        CompletableFuture<Void> closeFuture = new CompletableFuture<>();
        ClientCnx cnx = cnx();
        if (null == cnx) {
            cleanupAtClose(closeFuture);
        } else {
            ByteBuf cmd = Commands.newCloseConsumer(consumerId, requestId);
            cnx.sendRequestWithId(cmd, requestId).handle((v, exception) -> {
                cnx.removeConsumer(consumerId);
                if (exception == null || !cnx.ctx().channel().isActive()) {
                    // Either the close succeeded, or the connection is already gone
                    // (in which case the broker will drop the consumer anyway)
                    cleanupAtClose(closeFuture);
                } else {
                    closeFuture.completeExceptionally(exception);
                }
                return null;
            });
        }

        return closeFuture;
    }

    private void cleanupAtClose(CompletableFuture<Void> closeFuture) {
        log.info("[{}] [{}] Closed consumer", topic, subscription);
        setState(State.Closed);
        unAckedMessageTracker.close();
        if (possibleSendToDeadLetterTopicMessages != null) {
            possibleSendToDeadLetterTopicMessages.clear();
        }
        closeFuture.complete(null);
        client.cleanupConsumer(this);
        // fail all pending-receive futures to notify application
        failPendingReceive();
    }

    private void failPendingReceive() {
        lock.readLock().lock();
        try {
            if (listenerExecutor != null && !listenerExecutor.isShutdown()) {
                while (!pendingReceives.isEmpty()) {
                    CompletableFuture<Message<T>> receiveFuture = pendingReceives.poll();
                    if (receiveFuture != null) {
                        receiveFuture.completeExceptionally(
                                new PulsarClientException.AlreadyClosedException("Consumer is already closed"));
                    } else {
                        break;
                    }
                }
            }
        } finally {
            lock.readLock().unlock();
        }
    }

    // Broker notification (failover subscriptions): this consumer became the active one, or stopped being it.
    void activeConsumerChanged(boolean isActive) {
        if (consumerEventListener == null) {
            return;
        }

        listenerExecutor.execute(() -> {
            if (isActive) {
                consumerEventListener.becameActive(this, partitionIndex);
            } else {
                consumerEventListener.becameInactive(this, partitionIndex);
            }
        });
    }

    // Entry point invoked by the network layer for every incoming Message command:
    // verifies checksum, parses metadata, decrypts/uncompresses, then enqueues.
    void messageReceived(MessageIdData messageId, int redeliveryCount, ByteBuf headersAndPayload, ClientCnx cnx) {
        if (log.isDebugEnabled()) {
            log.debug("[{}][{}] Received message: {}/{}", topic, subscription, messageId.getLedgerId(),
                    messageId.getEntryId());
        }

        if (!verifyChecksum(headersAndPayload, messageId)) {
            // discard message with checksum error
            discardCorruptedMessage(messageId, cnx, ValidationError.ChecksumMismatch);
            return;
        }

        MessageMetadata msgMetadata;
        try {
            msgMetadata = Commands.parseMessageMetadata(headersAndPayload);
        } catch (Throwable t) {
            discardCorruptedMessage(messageId, cnx, ValidationError.ChecksumMismatch);
            return;
        }

        final int numMessages = msgMetadata.getNumMessagesInBatch();

        MessageIdImpl msgId = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(),
                getPartitionIndex());
        if (acknowledgmentsGroupingTracker.isDuplicate(msgId)) {
            if (log.isDebugEnabled()) {
                log.debug("[{}] [{}] Ignoring message as it was already being acked earlier by same consumer {}/{}",
                        topic, subscription, consumerName, msgId);
            }

            // Still return the permits so the broker keeps pushing
            increaseAvailablePermits(cnx, numMessages);
            return;
        }

        ByteBuf decryptedPayload = decryptPayloadIfNeeded(messageId, msgMetadata, headersAndPayload, cnx);

        boolean isMessageUndecryptable = isMessageUndecryptable(msgMetadata);

        if (decryptedPayload == null) {
            // Message was discarded or CryptoKeyReader isn't implemented
            return;
        }

        // uncompress decryptedPayload and release decryptedPayload-ByteBuf
        ByteBuf uncompressedPayload = isMessageUndecryptable ? decryptedPayload.retain()
                : uncompressPayloadIfNeeded(messageId, msgMetadata, decryptedPayload, cnx);
        decryptedPayload.release();
        if (uncompressedPayload == null) {
            // Message was discarded on decompression error
            return;
        }

        // if message is not decryptable then it can't be parsed as a batch-message. so, add EncryptionCtx to message
        // and return undecrypted payload
        if (isMessageUndecryptable || (numMessages == 1 && !msgMetadata.hasNumMessagesInBatch())) {

            final MessageImpl<T> message = new MessageImpl<>(topicName.toString(), msgId, msgMetadata,
                    uncompressedPayload, createEncryptionContext(msgMetadata), cnx, schema, redeliveryCount);
            uncompressedPayload.release();
            msgMetadata.recycle();

            lock.readLock().lock();
            try {
                // Enqueue the message so that it can be retrieved when application calls receive()
                // if the conf.getReceiverQueueSize() is 0 then discard message if no one is waiting for it.
                // if asyncReceive is waiting then notify callback without adding to incomingMessages queue
                if (deadLetterPolicy != null && possibleSendToDeadLetterTopicMessages != null
                        && redeliveryCount >= deadLetterPolicy.getMaxRedeliverCount()) {
                    possibleSendToDeadLetterTopicMessages.put((MessageIdImpl)message.getMessageId(),
                            Collections.singletonList(message));
                }
                if (!pendingReceives.isEmpty()) {
                    trackMessage(message);
                    notifyPendingReceivedCallback(message, null);
                } else if (canEnqueueMessage(message)) {
                    incomingMessages.add(message);
                }
            } finally {
                lock.readLock().unlock();
            }
        } else {
            // handle batch message enqueuing; uncompressed payload has all messages in batch
            receiveIndividualMessagesFromBatch(msgMetadata, redeliveryCount, uncompressedPayload, messageId, cnx);

            uncompressedPayload.release();
            msgMetadata.recycle();
        }

        if (listener != null) {
            triggerListener(numMessages);
        }
    }

    protected void triggerListener(int numMessages) {
        // Trigger the notification on the message listener in a separate thread to avoid blocking the networking
        // thread while the message processing happens
        listenerExecutor.execute(() -> {
            for (int i = 0; i < numMessages; i++) {
                try {
                    Message<T> msg = internalReceive(0, TimeUnit.MILLISECONDS);
                    // complete the callback-loop in case queue is cleared up
                    if (msg == null) {
                        if (log.isDebugEnabled()) {
                            log.debug("[{}] [{}] Message has been cleared from the queue", topic, subscription);
                        }
                        break;
                    }
                    try {
                        if (log.isDebugEnabled()) {
                            log.debug("[{}][{}] Calling message listener for message {}", topic, subscription,
                                    msg.getMessageId());
                        }
                        listener.received(ConsumerImpl.this, msg);
                    } catch (Throwable t) {
                        // Application listener errors must not kill the dispatch loop
                        log.error("[{}][{}] Message listener error in processing message: {}", topic, subscription,
                                msg.getMessageId(), t);
                    }
                } catch (PulsarClientException e) {
                    log.warn("[{}] [{}] Failed to dequeue the message for listener", topic, subscription, e);
                    return;
                }
            }
        });
    }

    protected boolean canEnqueueMessage(Message<T> message) {
        // Default behavior, can be overridden in subclasses
        return true;
    }

    /**
     * Notify waiting asyncReceive request with the received message
     *
     * @param message
     */
    void notifyPendingReceivedCallback(final Message<T> message, Exception exception) {
        if (pendingReceives.isEmpty()) {
            return;
        }

        // fetch receivedCallback from queue
        final CompletableFuture<Message<T>> receivedFuture = pendingReceives.poll();
        if (receivedFuture == null) {
            return;
        }

        if (exception != null) {
            listenerExecutor.execute(() -> receivedFuture.completeExceptionally(exception));
            return;
        }

        if (message == null) {
            IllegalStateException e = new IllegalStateException("received message can't be null");
            listenerExecutor.execute(() -> receivedFuture.completeExceptionally(e));
            return;
        }

        if (conf.getReceiverQueueSize() == 0) {
            // call interceptor and complete received callback
            interceptAndComplete(message, receivedFuture);
            return;
        }

        // increase permits for available message-queue
        messageProcessed(message);
        // call interceptor and complete received callback
        interceptAndComplete(message, receivedFuture);
    }

    private void interceptAndComplete(final Message<T> message,
            final CompletableFuture<Message<T>> receivedFuture) {
        // call proper interceptor
        final Message<T> interceptMessage = beforeConsume(message);
        // return message to receivedCallback
        listenerExecutor.execute(() -> receivedFuture.complete(interceptMessage));
    }

    // Splits a batched entry into its individual messages and enqueues each one.
    void receiveIndividualMessagesFromBatch(MessageMetadata
            msgMetadata, int redeliveryCount, ByteBuf uncompressedPayload, MessageIdData messageId, ClientCnx cnx) {
        int batchSize = msgMetadata.getNumMessagesInBatch();

        // create ack tracker for entry aka batch
        MessageIdImpl batchMessage = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(),
                getPartitionIndex());
        BatchMessageAcker acker = BatchMessageAcker.newAcker(batchSize);
        List<MessageImpl<T>> possibleToDeadLetter = null;
        if (deadLetterPolicy != null && redeliveryCount >= deadLetterPolicy.getMaxRedeliverCount()) {
            possibleToDeadLetter = new ArrayList<>();
        }
        int skippedMessages = 0;
        try {
            for (int i = 0; i < batchSize; ++i) {
                if (log.isDebugEnabled()) {
                    log.debug("[{}] [{}] processing message num - {} in batch", subscription, consumerName, i);
                }
                PulsarApi.SingleMessageMetadata.Builder singleMessageMetadataBuilder = PulsarApi.SingleMessageMetadata
                        .newBuilder();
                ByteBuf singleMessagePayload = Commands.deSerializeSingleMessageInBatch(uncompressedPayload,
                        singleMessageMetadataBuilder, i, batchSize);

                if (subscriptionMode == SubscriptionMode.NonDurable && startMessageId != null
                        && messageId.getLedgerId() == startMessageId.getLedgerId()
                        && messageId.getEntryId() == startMessageId.getEntryId()
                        && i <= startMessageId.getBatchIndex()) {
                    // If we are receiving a batch message, we need to discard messages that were prior
                    // to the startMessageId
                    if (log.isDebugEnabled()) {
                        log.debug("[{}] [{}] Ignoring message from before the startMessageId", subscription,
                                consumerName);
                    }

                    singleMessagePayload.release();
                    singleMessageMetadataBuilder.recycle();

                    ++skippedMessages;
                    continue;
                }

                if (singleMessageMetadataBuilder.getCompactedOut()) {
                    // message has been compacted out, so don't send to the user
                    singleMessagePayload.release();
                    singleMessageMetadataBuilder.recycle();

                    ++skippedMessages;
                    continue;
                }

                BatchMessageIdImpl batchMessageIdImpl = new BatchMessageIdImpl(messageId.getLedgerId(),
                        messageId.getEntryId(), getPartitionIndex(), i, acker);
                final MessageImpl<T> message = new MessageImpl<>(topicName.toString(), batchMessageIdImpl,
                        msgMetadata, singleMessageMetadataBuilder.build(), singleMessagePayload,
                        createEncryptionContext(msgMetadata), cnx, schema, redeliveryCount);
                if (possibleToDeadLetter != null) {
                    possibleToDeadLetter.add(message);
                }
                lock.readLock().lock();
                try {
                    if (pendingReceives.isEmpty()) {
                        incomingMessages.add(message);
                    } else {
                        notifyPendingReceivedCallback(message, null);
                    }
                } finally {
                    lock.readLock().unlock();
                }
                singleMessagePayload.release();
                singleMessageMetadataBuilder.recycle();
            }
        } catch (IOException e) {
            log.warn("[{}] [{}] unable to obtain message in batch", subscription, consumerName);
            discardCorruptedMessage(messageId, cnx, ValidationError.BatchDeSerializeError);
        }

        if (possibleToDeadLetter != null && possibleSendToDeadLetterTopicMessages != null) {
            possibleSendToDeadLetterTopicMessages.put(batchMessage, possibleToDeadLetter);
        }

        if (log.isDebugEnabled()) {
            log.debug("[{}] [{}] enqueued messages in batch. queue size - {}, available queue size - {}",
                    subscription, consumerName, incomingMessages.size(), incomingMessages.remainingCapacity());
        }

        if (skippedMessages > 0) {
            // Return permits for messages that were dropped client-side
            increaseAvailablePermits(cnx, skippedMessages);
        }
    }

    /**
     * Record the event that one message has been processed by the application.
     *
     * Periodically, it sends a Flow command to notify the broker that it can push more messages
     */
    protected synchronized void messageProcessed(Message<?> msg) {
        ClientCnx currentCnx = cnx();
        ClientCnx msgCnx = ((MessageImpl<?>) msg).getCnx();
        lastDequeuedMessage = msg.getMessageId();

        if (msgCnx != currentCnx) {
            // The processed message did belong to the old queue that was cleared after reconnection.
            return;
        }

        increaseAvailablePermits(currentCnx);
        stats.updateNumMsgsReceived(msg);

        if (conf.getAckTimeoutMillis() != 0) {
            // reset timer for messages that are received by the client
            MessageIdImpl id = (MessageIdImpl) msg.getMessageId();
            if (id instanceof BatchMessageIdImpl) {
                id = new MessageIdImpl(id.getLedgerId(), id.getEntryId(), getPartitionIndex());
            }
            if (partitionIndex != -1) {
                // we should no longer track this message, TopicsConsumer will take care from now onwards
                unAckedMessageTracker.remove(id);
            } else {
                unAckedMessageTracker.add(id);
            }
        }
    }

    protected void trackMessage(Message<?> msg) {
        if (msg != null) {
            MessageId messageId = msg.getMessageId();
            if (conf.getAckTimeoutMillis() > 0 && messageId instanceof MessageIdImpl) {
                MessageIdImpl id = (MessageIdImpl)messageId;
                if (id instanceof BatchMessageIdImpl) {
                    // do not add each item in batch message into tracker
                    id = new MessageIdImpl(id.getLedgerId(), id.getEntryId(), getPartitionIndex());
                }
                unAckedMessageTracker.add(id);
            }
        }
    }

    void increaseAvailablePermits(ClientCnx currentCnx) {
        increaseAvailablePermits(currentCnx, 1);
    }

    // Accumulates permits locally and flushes them to the broker (as a Flow command) once
    // the refill threshold is reached, unless the consumer is paused. The CAS loop handles
    // concurrent updaters racing on the permit counter.
    private void increaseAvailablePermits(ClientCnx currentCnx, int delta) {
        int available = AVAILABLE_PERMITS_UPDATER.addAndGet(this, delta);

        while (available >= receiverQueueRefillThreshold && !paused) {
            if (AVAILABLE_PERMITS_UPDATER.compareAndSet(this, available, 0)) {
                sendFlowPermitsToBroker(currentCnx, available);
                break;
            } else {
                available = AVAILABLE_PERMITS_UPDATER.get(this);
            }
        }
    }

    @Override
    public void pause() {
        paused = true;
    }

    @Override
    public void resume() {
        if (paused) {
            paused = false;
            // Trigger a flow-permit flush with whatever permits accumulated while paused
            increaseAvailablePermits(cnx(), 0);
        }
    }

    // Decrypts the payload when encryption keys are present; depending on the configured
    // ConsumerCryptoFailureAction the message may be consumed encrypted, discarded, or left
    // to time out (FAIL). Returns null when the message was discarded or delivery failed.
    private ByteBuf decryptPayloadIfNeeded(MessageIdData messageId, MessageMetadata msgMetadata, ByteBuf payload,
            ClientCnx currentCnx) {

        if (msgMetadata.getEncryptionKeysCount() == 0) {
            return payload.retain();
        }

        // If KeyReader is not configured throw exception based on config param
        if (conf.getCryptoKeyReader() == null) {
            switch (conf.getCryptoFailureAction()) {
            case CONSUME:
                log.warn("[{}][{}][{}] CryptoKeyReader interface is not implemented. Consuming encrypted message.",
                        topic, subscription, consumerName);
                return payload.retain();
            case DISCARD:
                log.warn(
                        "[{}][{}][{}] Skipping decryption since CryptoKeyReader interface is not implemented and config is set to discard",
                        topic, subscription, consumerName);
                discardMessage(messageId, currentCnx, ValidationError.DecryptionError);
                return null;
            case FAIL:
                // Leave the message unacked; the ack-timeout tracker will trigger redelivery
                MessageId m = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), partitionIndex);
                log.error(
                        "[{}][{}][{}][{}] Message delivery failed since CryptoKeyReader interface is not implemented to consume encrypted message",
                        topic, subscription, consumerName, m);
                unAckedMessageTracker.add(m);
                return null;
            }
        }

        ByteBuf decryptedData = this.msgCrypto.decrypt(msgMetadata, payload, conf.getCryptoKeyReader());
        if (decryptedData != null) {
            return decryptedData;
        }

        // Decryption itself failed: apply the configured failure action
        switch (conf.getCryptoFailureAction()) {
        case CONSUME:
            // Note, batch message will fail to consume even if config is set to consume
            log.warn("[{}][{}][{}][{}] Decryption failed. Consuming encrypted message since config is set to consume.",
                    topic, subscription, consumerName, messageId);
            return payload.retain();
        case DISCARD:
            log.warn("[{}][{}][{}][{}] Discarding message since decryption failed and config is set to discard",
                    topic, subscription, consumerName, messageId);
            discardMessage(messageId, currentCnx, ValidationError.DecryptionError);
            return null;
        case FAIL:
            MessageId m = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), partitionIndex);
            log.error(
                    "[{}][{}][{}][{}] Message delivery failed since unable to decrypt incoming message",
                    topic, subscription, consumerName, m);
            unAckedMessageTracker.add(m);
            return null;
        }
        return null;
    }

    // Decompresses the payload per the metadata's compression type; returns null (and
    // discards the entry) on size corruption or decompression failure.
    private ByteBuf uncompressPayloadIfNeeded(MessageIdData messageId, MessageMetadata msgMetadata, ByteBuf payload,
            ClientCnx currentCnx) {
        CompressionType compressionType = msgMetadata.getCompression();
        CompressionCodec codec = CompressionCodecProvider.getCompressionCodec(compressionType);
        int uncompressedSize = msgMetadata.getUncompressedSize();
        int payloadSize = payload.readableBytes();
        if (payloadSize > ClientCnx.getMaxMessageSize()) {
            // payload size is itself corrupted since it cannot be bigger than the MaxMessageSize
            log.error("[{}][{}] Got corrupted payload message size {} at {}", topic, subscription, payloadSize,
                    messageId);
            discardCorruptedMessage(messageId, currentCnx, ValidationError.UncompressedSizeCorruption);
            return null;
        }

        try {
            ByteBuf uncompressedPayload = codec.decode(payload, uncompressedSize);
            return uncompressedPayload;
        } catch (IOException e) {
            log.error("[{}][{}] Failed to decompress message with {} at {}: {}", topic, subscription,
                    compressionType, messageId, e.getMessage(), e);
            discardCorruptedMessage(messageId, currentCnx, ValidationError.DecompressionError);
            return null;
        }
    }

    private boolean verifyChecksum(ByteBuf headersAndPayload, MessageIdData messageId) {

        if (hasChecksum(headersAndPayload)) {
            int checksum = readChecksum(headersAndPayload);
            int computedChecksum = computeChecksum(headersAndPayload);
            if (checksum != computedChecksum) {
                // NOTE(review): the received checksum is formatted with Long.toHexString while the
                // computed one uses Integer.toHexString — a negative received checksum gets
                // sign-extended to 16 hex digits, making the two log values hard to compare.
                // Consider using the same conversion for both.
                log.error(
                        "[{}][{}] Checksum mismatch for message at {}:{}. Received checksum: 0x{}, Computed checksum: 0x{}",
                        topic, subscription, messageId.getLedgerId(), messageId.getEntryId(),
                        Long.toHexString(checksum), Integer.toHexString(computedChecksum));
                return false;
            }
        }

        return true;
    }

    private void discardCorruptedMessage(MessageIdData messageId, ClientCnx currentCnx,
            ValidationError validationError) {
        log.error("[{}][{}] Discarding corrupted message at {}:{}", topic, subscription, messageId.getLedgerId(),
                messageId.getEntryId());
        discardMessage(messageId, currentCnx, validationError);
    }

    // Individually acks the broken entry (carrying a validation error) and returns the permit.
    private void discardMessage(MessageIdData messageId, ClientCnx currentCnx, ValidationError validationError) {
        ByteBuf cmd = Commands.newAck(consumerId, messageId.getLedgerId(), messageId.getEntryId(),
                AckType.Individual, validationError, Collections.emptyMap());
        currentCnx.ctx().writeAndFlush(cmd, currentCnx.ctx().voidPromise());
        increaseAvailablePermits(currentCnx);
        stats.incrementNumReceiveFailed();
    }

    @Override
    String getHandlerName() {
        return subscription;
    }

    @Override
    public boolean isConnected() {
        return getClientCnx() != null && (getState() == State.Ready);
    }

    int getPartitionIndex() {
        return partitionIndex;
    }

    @Override
    public int getAvailablePermits() {
        return AVAILABLE_PERMITS_UPDATER.get(this);
    }

    @Override
    public int numMessagesInQueue() {
        return incomingMessages.size();
    }

    @Override
    public void redeliverUnacknowledgedMessages() {
        ClientCnx cnx = cnx();
        if (isConnected() && cnx.getRemoteEndpointProtocolVersion() >= ProtocolVersion.v2.getNumber()) {
            int currentSize = 0;
            synchronized (this) {
                // Drop the local queue; those messages will be re-pushed by the broker
                currentSize = incomingMessages.size();
                incomingMessages.clear();
                unAckedMessageTracker.clear();
            }
            cnx.ctx().writeAndFlush(Commands.newRedeliverUnacknowledgedMessages(consumerId),
                    cnx.ctx().voidPromise());
            if (currentSize > 0) {
                increaseAvailablePermits(cnx, currentSize);
            }
            if (log.isDebugEnabled()) {
                log.debug("[{}] [{}] [{}] Redeliver unacked messages and send {} permits", subscription, topic,
                        consumerName, currentSize);
            }
            return;
        }
        if (cnx == null || (getState() == State.Connecting)) {
            log.warn("[{}] Client Connection needs to be established for redelivery of unacknowledged messages", this);
        } else {
            log.warn("[{}] Reconnecting the client to redeliver the messages.", this);
            cnx.ctx().close();
        }
    }

    @Override
    public void redeliverUnacknowledgedMessages(Set<MessageId> messageIds) {
        if (messageIds.isEmpty()) {
            return;
        }

        checkArgument(messageIds.stream().findFirst().get() instanceof MessageIdImpl);

        if (conf.getSubscriptionType() != SubscriptionType.Shared
                && conf.getSubscriptionType() != SubscriptionType.Key_Shared) {
            // We cannot redeliver single messages if subscription type is not Shared
            redeliverUnacknowledgedMessages();
            return;
        }
        ClientCnx cnx = cnx();
        if (isConnected() && cnx.getRemoteEndpointProtocolVersion() >= ProtocolVersion.v2.getNumber()) {
            int messagesFromQueue = removeExpiredMessagesFromQueue(messageIds);
            // Send the redeliver command in chunks to bound the size of each command
            Iterable<List<MessageIdImpl>> batches = Iterables.partition(
                messageIds.stream()
                    .map(messageId -> (MessageIdImpl)messageId)
                    .collect(Collectors.toSet()), MAX_REDELIVER_UNACKNOWLEDGED);
            MessageIdData.Builder builder = MessageIdData.newBuilder();
            batches.forEach(ids -> {
                List<MessageIdData> messageIdDatas = ids.stream().map(messageId -> {
                    // process message possible to dead letter topic
                    processPossibleToDLQ(messageId);

                    // attempt to remove message from batchMessageAckTracker
                    builder.setPartition(messageId.getPartitionIndex());
                    builder.setLedgerId(messageId.getLedgerId());
                    builder.setEntryId(messageId.getEntryId());
                    return builder.build();
                }).collect(Collectors.toList());
                ByteBuf cmd = Commands.newRedeliverUnacknowledgedMessages(consumerId, messageIdDatas);
                cnx.ctx().writeAndFlush(cmd, cnx.ctx().voidPromise());
                messageIdDatas.forEach(MessageIdData::recycle);
            });
            if (messagesFromQueue > 0) {
                increaseAvailablePermits(cnx, messagesFromQueue);
            }
            builder.recycle();
            if (log.isDebugEnabled()) {
                log.debug("[{}] [{}] [{}] Redeliver unacked messages and increase {} permits", subscription, topic,
                        consumerName, messagesFromQueue);
            }
            return;
        }
        if (cnx == null || (getState() == State.Connecting)) {
            log.warn("[{}] Client Connection needs to be established for redelivery of unacknowledged messages", this);
        } else {
            log.warn("[{}] Reconnecting the client to redeliver the messages.", this);
            cnx.ctx().close();
        }
    }

    // If the message reached its max redelivery count, republish it to the dead letter topic
    // (creating the DLQ producer lazily) and ack it on the original topic.
    private void processPossibleToDLQ(MessageIdImpl messageId) {
        List<MessageImpl<T>> deadLetterMessages = null;
        if (possibleSendToDeadLetterTopicMessages != null) {
            if (messageId instanceof BatchMessageIdImpl) {
                deadLetterMessages = possibleSendToDeadLetterTopicMessages.get(
                        new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), getPartitionIndex()));
            } else {
                deadLetterMessages = possibleSendToDeadLetterTopicMessages.get(messageId);
            }
        }
        if (deadLetterMessages != null) {
            if (deadLetterProducer == null) {
                try {
                    deadLetterProducer = client.newProducer(schema)
                            .topic(this.deadLetterPolicy.getDeadLetterTopic())
                            .blockIfQueueFull(false)
                            .create();
                } catch (Exception e) {
                    log.error("Create dead letter producer exception with topic: {}",
                            deadLetterPolicy.getDeadLetterTopic(), e);
                }
            }
            if (deadLetterProducer != null) {
                try {
                    for (MessageImpl<T> message : deadLetterMessages) {
                        deadLetterProducer.newMessage()
                                .value(message.getValue())
                                .properties(message.getProperties())
                                .send();
                    }
                    // Only acked after every message was successfully re-published
                    acknowledge(messageId);
                } catch (Exception e) {
                    log.error("Send to dead letter topic exception with topic: {}, messageId: {}",
                            deadLetterProducer.getTopic(), messageId, e);
                }
            }
        }
    }

    @Override
    public void seek(MessageId messageId) throws PulsarClientException {
        try {
            seekAsync(messageId).get();
        } catch (ExecutionException | InterruptedException e) {
            throw new PulsarClientException(e);
        }
    }

    @Override
    public void seek(long timestamp) throws PulsarClientException {
        try {
            seekAsync(timestamp).get();
        } catch (ExecutionException | InterruptedException e) {
            throw new
PulsarClientException(e); } } @Override public CompletableFuture<Void> seekAsync(long timestamp) { if (getState() == State.Closing || getState() == State.Closed) { return FutureUtil .failedFuture(new PulsarClientException.AlreadyClosedException("Consumer was already closed")); } if (!isConnected()) { return FutureUtil.failedFuture(new PulsarClientException("Not connected to broker")); } final CompletableFuture<Void> seekFuture = new CompletableFuture<>(); long requestId = client.newRequestId(); ByteBuf seek = Commands.newSeek(consumerId, requestId, timestamp); ClientCnx cnx = cnx(); log.info("[{}][{}] Seek subscription to publish time {}", topic, subscription, timestamp); cnx.sendRequestWithId(seek, requestId).thenRun(() -> { log.info("[{}][{}] Successfully reset subscription to publish time {}", topic, subscription, timestamp); acknowledgmentsGroupingTracker.flushAndClean(); lastDequeuedMessage = MessageId.earliest; incomingMessages.clear(); seekFuture.complete(null); }).exceptionally(e -> { log.error("[{}][{}] Failed to reset subscription: {}", topic, subscription, e.getCause().getMessage()); seekFuture.completeExceptionally(e.getCause()); return null; }); return seekFuture; } @Override public CompletableFuture<Void> seekAsync(MessageId messageId) { if (getState() == State.Closing || getState() == State.Closed) { return FutureUtil .failedFuture(new PulsarClientException.AlreadyClosedException("Consumer was already closed")); } if (!isConnected()) { return FutureUtil.failedFuture(new PulsarClientException("Not connected to broker")); } final CompletableFuture<Void> seekFuture = new CompletableFuture<>(); long requestId = client.newRequestId(); MessageIdImpl msgId = (MessageIdImpl) messageId; ByteBuf seek = Commands.newSeek(consumerId, requestId, msgId.getLedgerId(), msgId.getEntryId()); ClientCnx cnx = cnx(); log.info("[{}][{}] Seek subscription to message id {}", topic, subscription, messageId); cnx.sendRequestWithId(seek, requestId).thenRun(() -> { 
log.info("[{}][{}] Successfully reset subscription to message id {}", topic, subscription, messageId); acknowledgmentsGroupingTracker.flushAndClean(); lastDequeuedMessage = messageId; incomingMessages.clear(); seekFuture.complete(null); }).exceptionally(e -> { log.error("[{}][{}] Failed to reset subscription: {}", topic, subscription, e.getCause().getMessage()); seekFuture.completeExceptionally(e.getCause()); return null; }); return seekFuture; } public boolean hasMessageAvailable() throws PulsarClientException { try { if (hasMoreMessages(lastMessageIdInBroker, lastDequeuedMessage)) { return true; } return hasMessageAvailableAsync().get(); } catch (ExecutionException | InterruptedException e) { throw new PulsarClientException(e); } } public CompletableFuture<Boolean> hasMessageAvailableAsync() { final CompletableFuture<Boolean> booleanFuture = new CompletableFuture<>(); if (hasMoreMessages(lastMessageIdInBroker, lastDequeuedMessage)) { booleanFuture.complete(true); } else { getLastMessageIdAsync().thenAccept(messageId -> { lastMessageIdInBroker = messageId; if (hasMoreMessages(lastMessageIdInBroker, lastDequeuedMessage)) { booleanFuture.complete(true); } else { booleanFuture.complete(false); } }).exceptionally(e -> { log.error("[{}][{}] Failed getLastMessageId command", topic, subscription); booleanFuture.completeExceptionally(e.getCause()); return null; }); } return booleanFuture; } private boolean hasMoreMessages(MessageId lastMessageIdInBroker, MessageId lastDequeuedMessage) { if (lastMessageIdInBroker.compareTo(lastDequeuedMessage) > 0 && ((MessageIdImpl)lastMessageIdInBroker).getEntryId() != -1) { return true; } else { // Make sure batching message can be read completely. 
return lastMessageIdInBroker.compareTo(lastDequeuedMessage) == 0 && incomingMessages.size() > 0; } } CompletableFuture<MessageId> getLastMessageIdAsync() { if (getState() == State.Closing || getState() == State.Closed) { return FutureUtil .failedFuture(new PulsarClientException.AlreadyClosedException("Consumer was already closed")); } AtomicLong opTimeoutMs = new AtomicLong(client.getConfiguration().getOperationTimeoutMs()); Backoff backoff = new BackoffBuilder() .setInitialTime(100, TimeUnit.MILLISECONDS) .setMax(opTimeoutMs.get() * 2, TimeUnit.MILLISECONDS) .setMandatoryStop(0, TimeUnit.MILLISECONDS) .useUserConfiguredIntervals(backoffIntervalNanos, maxBackoffIntervalNanos) .create(); CompletableFuture<MessageId> getLastMessageIdFuture = new CompletableFuture<>(); internalGetLastMessageIdAsync(backoff, opTimeoutMs, getLastMessageIdFuture); return getLastMessageIdFuture; } private void internalGetLastMessageIdAsync(final Backoff backoff, final AtomicLong remainingTime, CompletableFuture<MessageId> future) { ClientCnx cnx = cnx(); if (isConnected() && cnx != null) { if (!Commands.peerSupportsGetLastMessageId(cnx.getRemoteEndpointProtocolVersion())) { future.completeExceptionally(new PulsarClientException .NotSupportedException("GetLastMessageId Not supported for ProtocolVersion: " + cnx.getRemoteEndpointProtocolVersion())); } long requestId = client.newRequestId(); ByteBuf getLastIdCmd = Commands.newGetLastMessageId(consumerId, requestId); log.info("[{}][{}] Get topic last message Id", topic, subscription); cnx.sendGetLastMessageId(getLastIdCmd, requestId).thenAccept((result) -> { log.info("[{}][{}] Successfully getLastMessageId {}:{}", topic, subscription, result.getLedgerId(), result.getEntryId()); future.complete(new MessageIdImpl(result.getLedgerId(), result.getEntryId(), result.getPartition())); }).exceptionally(e -> { log.error("[{}][{}] Failed getLastMessageId command", topic, subscription); future.completeExceptionally(e.getCause()); return null; }); } else 
{ long nextDelay = Math.min(backoff.next(), remainingTime.get()); if (nextDelay <= 0) { future.completeExceptionally(new PulsarClientException .TimeoutException("Could not getLastMessageId within configured timeout.")); return; } ((ScheduledExecutorService) listenerExecutor).schedule(() -> { log.warn("[{}] [{}] Could not get connection while getLastMessageId -- Will try again in {} ms", topic, getHandlerName(), nextDelay); remainingTime.addAndGet(-nextDelay); internalGetLastMessageIdAsync(backoff, remainingTime, future); }, nextDelay, TimeUnit.MILLISECONDS); } } private MessageIdImpl getMessageIdImpl(Message<?> msg) { MessageIdImpl messageId = (MessageIdImpl) msg.getMessageId(); if (messageId instanceof BatchMessageIdImpl) { // messageIds contain MessageIdImpl, not BatchMessageIdImpl messageId = new MessageIdImpl(messageId.getLedgerId(), messageId.getEntryId(), getPartitionIndex()); } return messageId; } private boolean isMessageUndecryptable(MessageMetadata msgMetadata) { return (msgMetadata.getEncryptionKeysCount() > 0 && conf.getCryptoKeyReader() == null && conf.getCryptoFailureAction() == ConsumerCryptoFailureAction.CONSUME); } /** * Create EncryptionContext if message payload is encrypted * * @param msgMetadata * @return {@link Optional}<{@link EncryptionContext}> */ private Optional<EncryptionContext> createEncryptionContext(MessageMetadata msgMetadata) { EncryptionContext encryptionCtx = null; if (msgMetadata.getEncryptionKeysCount() > 0) { encryptionCtx = new EncryptionContext(); Map<String, EncryptionKey> keys = msgMetadata.getEncryptionKeysList().stream() .collect( Collectors.toMap(EncryptionKeys::getKey, e -> new EncryptionKey(e.getValue().toByteArray(), e.getMetadataList() != null ? 
e.getMetadataList().stream().collect( Collectors.toMap(KeyValue::getKey, KeyValue::getValue)) : null))); byte[] encParam = new byte[MessageCrypto.ivLen]; msgMetadata.getEncryptionParam().copyTo(encParam, 0); Optional<Integer> batchSize = Optional .ofNullable(msgMetadata.hasNumMessagesInBatch() ? msgMetadata.getNumMessagesInBatch() : null); encryptionCtx.setKeys(keys); encryptionCtx.setParam(encParam); encryptionCtx.setAlgorithm(msgMetadata.getEncryptionAlgo()); encryptionCtx .setCompressionType(CompressionCodecProvider.convertFromWireProtocol(msgMetadata.getCompression())); encryptionCtx.setUncompressedMessageSize(msgMetadata.getUncompressedSize()); encryptionCtx.setBatchSize(batchSize); } return Optional.ofNullable(encryptionCtx); } private int removeExpiredMessagesFromQueue(Set<MessageId> messageIds) { int messagesFromQueue = 0; Message<T> peek = incomingMessages.peek(); if (peek != null) { MessageIdImpl messageId = getMessageIdImpl(peek); if (!messageIds.contains(messageId)) { // first message is not expired, then no message is expired in queue. 
return 0; } // try not to remove elements that are added while we remove Message<T> message = incomingMessages.poll(); while (message != null) { messagesFromQueue++; MessageIdImpl id = getMessageIdImpl(message); if (!messageIds.contains(id)) { messageIds.add(id); break; } message = incomingMessages.poll(); } } return messagesFromQueue; } @Override public ConsumerStats getStats() { return stats; } void setTerminated() { log.info("[{}] [{}] [{}] Consumer has reached the end of topic", subscription, topic, consumerName); hasReachedEndOfTopic = true; if (listener != null) { // Propagate notification to listener listener.reachedEndOfTopic(this); } } @Override public boolean hasReachedEndOfTopic() { return hasReachedEndOfTopic; } @Override public int hashCode() { return Objects.hash(topic, subscription, consumerName); } // wrapper for connection methods ClientCnx cnx() { return this.connectionHandler.cnx(); } void resetBackoff() { this.connectionHandler.resetBackoff(); } void connectionClosed(ClientCnx cnx) { this.connectionHandler.connectionClosed(cnx); } @VisibleForTesting public ClientCnx getClientCnx() { return this.connectionHandler.getClientCnx(); } void setClientCnx(ClientCnx clientCnx) { this.connectionHandler.setClientCnx(clientCnx); } void reconnectLater(Throwable exception) { this.connectionHandler.reconnectLater(exception); } void grabCnx() { this.connectionHandler.grabCnx(); } public String getTopicNameWithoutPartition() { return topicNameWithoutPartition; } private static final Logger log = LoggerFactory.getLogger(ConsumerImpl.class); }
Don't request redelivery of dead letter messages (#4426) If we send a message to the dead letter topic, we shouldn't also request that the same message be redelivered. The process to send to DLQ does do an acknowledge(), but acknowledge is async and not guaranteed, so it can race with the redelivery request.
pulsar-client/src/main/java/org/apache/pulsar/client/impl/ConsumerImpl.java
Don't request redelivery of dead letter messages (#4426)
<ide><path>ulsar-client/src/main/java/org/apache/pulsar/client/impl/ConsumerImpl.java <ide> .collect(Collectors.toSet()), MAX_REDELIVER_UNACKNOWLEDGED); <ide> MessageIdData.Builder builder = MessageIdData.newBuilder(); <ide> batches.forEach(ids -> { <del> List<MessageIdData> messageIdDatas = ids.stream().map(messageId -> { <del> // process message possible to dead letter topic <del> processPossibleToDLQ(messageId); <del> // attempt to remove message from batchMessageAckTracker <del> builder.setPartition(messageId.getPartitionIndex()); <del> builder.setLedgerId(messageId.getLedgerId()); <del> builder.setEntryId(messageId.getEntryId()); <del> return builder.build(); <del> }).collect(Collectors.toList()); <add> List<MessageIdData> messageIdDatas = ids.stream() <add> .filter(messageId -> !processPossibleToDLQ(messageId)) <add> .map(messageId -> { <add> builder.setPartition(messageId.getPartitionIndex()); <add> builder.setLedgerId(messageId.getLedgerId()); <add> builder.setEntryId(messageId.getEntryId()); <add> return builder.build(); <add> }).collect(Collectors.toList()); <add> <ide> ByteBuf cmd = Commands.newRedeliverUnacknowledgedMessages(consumerId, messageIdDatas); <ide> cnx.ctx().writeAndFlush(cmd, cnx.ctx().voidPromise()); <ide> messageIdDatas.forEach(MessageIdData::recycle); <ide> } <ide> } <ide> <del> private void processPossibleToDLQ(MessageIdImpl messageId) { <add> private boolean processPossibleToDLQ(MessageIdImpl messageId) { <ide> List<MessageImpl<T>> deadLetterMessages = null; <ide> if (possibleSendToDeadLetterTopicMessages != null) { <ide> if (messageId instanceof BatchMessageIdImpl) { <ide> .send(); <ide> } <ide> acknowledge(messageId); <add> return true; <ide> } catch (Exception e) { <ide> log.error("Send to dead letter topic exception with topic: {}, messageId: {}", deadLetterProducer.getTopic(), messageId, e); <ide> } <ide> } <ide> } <add> return false; <ide> } <ide> <ide> @Override
Java
agpl-3.0
d41018a18f1a68bff7597c8242b50808ce32e062
0
mnlipp/jgrapes,mnlipp/jgrapes
/* * JGrapes Event Driven Framework * Copyright (C) 2022 Michael N. Lipp * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, see <http://www.gnu.org/licenses/>. */ package org.jgrapes.util; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import org.jgrapes.core.Channel; import org.jgrapes.core.Component; import org.jgrapes.core.ComponentFactory; import org.jgrapes.core.Components; import org.jgrapes.core.Event; import org.jgrapes.core.Manager; import org.jgrapes.core.annotation.Handler; import org.jgrapes.util.events.ConfigurationUpdate; /** * Provides child components dynamically using {@link ComponentFactory}s. * * An instance is configured with a collection of {@link ComponentFactory}s * (see {@link #setFactories(ComponentFactory...)}) and component * configurations (see {@link #setPinned(List)} and * {@link #onConfigurationUpdate(ConfigurationUpdate)}). For each * configuration where the referenced factory exists, a component is * created and attached to this component provider as child. * * The component configurations can be updated by * {@link ConfigurationUpdate} events. 
* * @since 1.3 */ @SuppressWarnings("PMD.DataflowAnomalyAnalysis") public class ComponentProvider extends Component { /** The entry name for the component's type. */ public static final String COMPONENT_TYPE = "componentType"; /** The entry name for the component's name. */ public static final String COMPONENT_NAME = "name"; private String componentsEntry = "components"; private Map<String, ComponentFactory> factoryByType; private List<Map<?, ?>> currentConfig = Collections.emptyList(); private List<Map<?, ?>> pinnedConfigurations = Collections.emptyList(); /** * Creates a new component with its channel set to this object. */ public ComponentProvider() { this(Channel.SELF); } /** * Creates a new component with its channel set to the given * channel. * * @param componentChannel the channel that the component's * handlers listen on by default and that * {@link Manager#fire(Event, Channel...)} sends the event to */ public ComponentProvider(Channel componentChannel) { super(componentChannel); } /** * Sets the name of the entry in a {@link ConfigurationUpdate} event, * that hold s the information about the components to be provided. * Defaults to "components". * * @param name the name of the entry * @return the component provider for easy chaining */ public ComponentProvider setComponentsEntry(String name) { this.componentsEntry = name; return this; } /** * Sets the factories that this provider knows about. Only * configurations with a component type that matches one * of the factories are handled by this provider. * * @param factories the factories * @return the component provider for easy chaining */ public ComponentProvider setFactories(ComponentFactory... factories) { factoryByType = Arrays.stream(factories).collect(Collectors .toMap(f -> f.componentType().getName(), Function.identity(), (a, b) -> b)); synchronize(currentConfig); return this; } /** * Sets the pinned configurations. 
These configurations are * in effect independent of any information passed by * {@link ConfigurationUpdate} events. * * @param pinnedConfigurations the configurations to be pinned * @return the component provider for easy chaining */ public ComponentProvider setPinned(List<Map<?, ?>> pinnedConfigurations) { this.pinnedConfigurations = pinnedConfigurations; synchronize(currentConfig); return this; } /** * Selects configuration information targeted at this component * from the event. The default implementation invokes * {@link ConfigurationUpdate#structured(String)} with this * component's path to obtain the information. Called by * {@link #componentConfigurations(ConfigurationUpdate)}. * * @param evt the event * @return the configuration information as provided by * {@link ConfigurationUpdate#structured(String)} if it exists * */ protected Optional<Map<String, ?>> providerConfiguration(ConfigurationUpdate evt) { return evt.structured(componentPath()); } /** * Returns the configurations for the components to be provided. * Overriding this method enables derived classes to fully * control how this information is retrieved from the * {@link ConfigurationUpdate} event. * * This implementation of the method calls * {@link #componentConfigurations(ConfigurationUpdate)} to obtain * all configuration information targeted at this component. * It then uses the configured entry * (see {@link #setComponentsEntry(String)}) to retrieve the information * about the components to be provided. * * The method must ensure that the result is a collection * of maps, where each map has at least entries with * keys "componentType" and "name", each associated with a * value of type {@link String}. 
* * @param evt the event * @return the collection */ @SuppressWarnings("PMD.AvoidDuplicateLiterals") protected List<Map<?, ?>> componentConfigurations(ConfigurationUpdate evt) { return providerConfiguration(evt) .map(conf -> conf.get(componentsEntry)) .filter(Collection.class::isInstance).map(c -> (Collection<?>) c) .orElse(Collections.emptyList()).stream() .filter(Map.class::isInstance).map(c -> (Map<?, ?>) c) .filter(c -> c.keySet() .containsAll(Set.of(COMPONENT_TYPE, COMPONENT_NAME)) && String.class.isInstance(c.get(COMPONENT_TYPE)) && String.class.isInstance(c.get(COMPONENT_NAME))) .map(c -> { @SuppressWarnings("unchecked") // Checked for relevant entries var casted = (Map<String, String>) c; return casted; }) .collect(Collectors.toList()); } /** * Uses the information from the event to configure the * provided components. * * @param evt the event */ @Handler public void onConfigurationUpdate(ConfigurationUpdate evt) { synchronize(componentConfigurations(evt)); } @SuppressWarnings("PMD.AvoidSynchronizedAtMethodLevel") private synchronized void synchronize(List<Map<?, ?>> requested) { // Calculate starters for to be added/to be removed var toBeAdded = new LinkedList<>(requested); toBeAdded.addAll(pinnedConfigurations); var toBeRemoved = children().stream() .map(c -> Components.manager(c)) .collect(Collectors.toCollection(LinkedList::new)); // Don't attempt to add something that we have no factory for. toBeAdded = toBeAdded.stream() .filter(c -> factoryByType.containsKey(c.get(COMPONENT_TYPE))) .collect(Collectors.toCollection(LinkedList::new)); // Remove the intersection of "to be added" and "to be removed" from // both, thus leaving what their names say. 
for (var childIter = toBeRemoved.iterator(); childIter.hasNext();) { var child = childIter.next(); @SuppressWarnings("PMD.DataflowAnomalyAnalysis") var childComp = child.component().getClass().getName(); var childName = child.name(); for (var confIter = toBeAdded.iterator(); confIter.hasNext();) { var config = confIter.next(); var confComp = config.get(COMPONENT_TYPE); var confName = config.get(COMPONENT_NAME); if (confComp.equals(childComp) && Objects.equals(childName, confName)) { confIter.remove(); childIter.remove(); } } } // Update children for (var child : toBeRemoved) { child.detach(); } toBeAdded.stream().map(config -> { return factoryByType.get(config.get(COMPONENT_TYPE)) .create(channel(), config).map( c -> ComponentFactory.setStandardProperties(c, config)) .stream(); }).flatMap(Function.identity()) .forEach(component -> attach(component)); // Save configuration as current currentConfig = requested; } }
org.jgrapes.util/src/org/jgrapes/util/ComponentProvider.java
/* * JGrapes Event Driven Framework * Copyright (C) 2022 Michael N. Lipp * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, see <http://www.gnu.org/licenses/>. */ package org.jgrapes.util; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import org.jgrapes.core.Channel; import org.jgrapes.core.Component; import org.jgrapes.core.ComponentFactory; import org.jgrapes.core.Components; import org.jgrapes.core.Event; import org.jgrapes.core.Manager; import org.jgrapes.core.annotation.Handler; import org.jgrapes.util.events.ConfigurationUpdate; /** * Provides child components dynamically using {@link ComponentFactory}s. * * An instance is configured with a collection of {@link ComponentFactory}s * (see {@link #setFactories(ComponentFactory...)}) and component * configurations (see {@link #setPinned(List)} and * {@link #onConfigurationUpdate(ConfigurationUpdate)}). For each * configuration where the referenced factory exists, a component is * created and attached to this component provider as child. * * The component configurations can be updated by * {@link ConfigurationUpdate} events. 
* * @since 1.3 */ @SuppressWarnings("PMD.DataflowAnomalyAnalysis") public class ComponentProvider extends Component { private String componentsEntry = "components"; private Map<String, ComponentFactory> factoryByType; private List<Map<?, ?>> currentConfig = Collections.emptyList(); private List<Map<?, ?>> pinnedConfigurations = Collections.emptyList(); /** * Creates a new component with its channel set to this object. */ public ComponentProvider() { this(Channel.SELF); } /** * Creates a new component with its channel set to the given * channel. * * @param componentChannel the channel that the component's * handlers listen on by default and that * {@link Manager#fire(Event, Channel...)} sends the event to */ public ComponentProvider(Channel componentChannel) { super(componentChannel); } /** * Sets the name of the entry in a {@link ConfigurationUpdate} event, * that hold s the information about the components to be provided. * Defaults to "components". * * @param name the name of the entry * @return the component provider for easy chaining */ public ComponentProvider setComponentsEntry(String name) { this.componentsEntry = name; return this; } /** * Sets the factories that this provider knows about. Only * configurations with a component type that matches one * of the factories are handled by this provider. * * @param factories the factories * @return the component provider for easy chaining */ public ComponentProvider setFactories(ComponentFactory... factories) { factoryByType = Arrays.stream(factories).collect(Collectors .toMap(f -> f.componentType().getName(), Function.identity(), (a, b) -> b)); synchronize(currentConfig); return this; } /** * Sets the pinned configurations. These configurations are * in effect independent of any information passed by * {@link ConfigurationUpdate} events. 
* * @param pinnedConfigurations the configurations to be pinned * @return the component provider for easy chaining */ public ComponentProvider setPinned(List<Map<?, ?>> pinnedConfigurations) { this.pinnedConfigurations = pinnedConfigurations; synchronize(currentConfig); return this; } /** * Selects configuration information targeted at this component * from the event. The default implementation invokes * {@link ConfigurationUpdate#structured(String)} with this * component's path to obtain the information. Called by * {@link #componentConfigurations(ConfigurationUpdate)}. * * @param evt the event * @return the configuration information as provided by * {@link ConfigurationUpdate#structured(String)} if it exists * */ protected Optional<Map<String, ?>> providerConfiguration(ConfigurationUpdate evt) { return evt.structured(componentPath()); } /** * Returns the configurations for the components to be provided. * Overriding this method enables derived classes to fully * control how this information is retrieved from the * {@link ConfigurationUpdate} event. * * This implementation of the method calls * {@link #componentConfigurations(ConfigurationUpdate)} to obtain * all configuration information targeted at this component. * It then uses the configured entry * (see {@link #setComponentsEntry(String)}) to retrieve the information * about the components to be provided. * * The method must ensure that the result is a collection * of maps, where each map has at least entries with * keys "componentType" and "name", each associated with a * value of type {@link String}. 
* * @param evt the event * @return the collection */ @SuppressWarnings("PMD.AvoidDuplicateLiterals") protected List<Map<?, ?>> componentConfigurations(ConfigurationUpdate evt) { return providerConfiguration(evt) .map(conf -> conf.get(componentsEntry)) .filter(Collection.class::isInstance).map(c -> (Collection<?>) c) .orElse(Collections.emptyList()).stream() .filter(Map.class::isInstance).map(c -> (Map<?, ?>) c) .filter(c -> c.keySet().containsAll(Set.of("componentType", "name")) && String.class.isInstance(c.get("componentType")) && String.class.isInstance(c.get("name"))) .map(c -> { @SuppressWarnings("unchecked") // Checked for relevant entries var casted = (Map<String, String>) c; return casted; }) .collect(Collectors.toList()); } /** * Uses the information from the event to configure the * provided components. * * @param evt the event */ @Handler public void onConfigurationUpdate(ConfigurationUpdate evt) { synchronize(componentConfigurations(evt)); } @SuppressWarnings("PMD.AvoidSynchronizedAtMethodLevel") private synchronized void synchronize(List<Map<?, ?>> requested) { // Calculate starters for to be added/to be removed var toBeAdded = new LinkedList<>(requested); toBeAdded.addAll(pinnedConfigurations); var toBeRemoved = children().stream() .map(c -> Components.manager(c)) .collect(Collectors.toCollection(LinkedList::new)); // Don't attempt to add something that we have no factory for. toBeAdded = toBeAdded.stream() .filter(c -> factoryByType.containsKey(c.get("componentType"))) .collect(Collectors.toCollection(LinkedList::new)); // Remove the intersection of "to be added" and "to be removed" from // both, thus leaving what their names say. 
for (var childIter = toBeRemoved.iterator(); childIter.hasNext();) { var child = childIter.next(); @SuppressWarnings("PMD.DataflowAnomalyAnalysis") var childComp = child.component().getClass().getName(); var childName = child.name(); for (var confIter = toBeAdded.iterator(); confIter.hasNext();) { var config = confIter.next(); var confComp = config.get("componentType"); var confName = config.get("name"); if (confComp.equals(childComp) && Objects.equals(childName, confName)) { confIter.remove(); childIter.remove(); } } } // Update children for (var child : toBeRemoved) { child.detach(); } toBeAdded.stream().map(config -> { return factoryByType.get(config.get("componentType")) .create(channel(), config).map( c -> ComponentFactory.setStandardProperties(c, config)) .stream(); }).flatMap(Function.identity()) .forEach(component -> attach(component)); // Save configuration as current currentConfig = requested; } }
Use constants.
org.jgrapes.util/src/org/jgrapes/util/ComponentProvider.java
Use constants.
<ide><path>rg.jgrapes.util/src/org/jgrapes/util/ComponentProvider.java <ide> @SuppressWarnings("PMD.DataflowAnomalyAnalysis") <ide> public class ComponentProvider extends Component { <ide> <add> /** The entry name for the component's type. */ <add> public static final String COMPONENT_TYPE = "componentType"; <add> /** The entry name for the component's name. */ <add> public static final String COMPONENT_NAME = "name"; <add> <ide> private String componentsEntry = "components"; <ide> private Map<String, ComponentFactory> factoryByType; <ide> private List<Map<?, ?>> currentConfig = Collections.emptyList(); <ide> .filter(Collection.class::isInstance).map(c -> (Collection<?>) c) <ide> .orElse(Collections.emptyList()).stream() <ide> .filter(Map.class::isInstance).map(c -> (Map<?, ?>) c) <del> .filter(c -> c.keySet().containsAll(Set.of("componentType", "name")) <del> && String.class.isInstance(c.get("componentType")) <del> && String.class.isInstance(c.get("name"))) <add> .filter(c -> c.keySet() <add> .containsAll(Set.of(COMPONENT_TYPE, COMPONENT_NAME)) <add> && String.class.isInstance(c.get(COMPONENT_TYPE)) <add> && String.class.isInstance(c.get(COMPONENT_NAME))) <ide> .map(c -> { <ide> @SuppressWarnings("unchecked") // Checked for relevant entries <ide> var casted = (Map<String, String>) c; <ide> <ide> // Don't attempt to add something that we have no factory for. 
<ide> toBeAdded = toBeAdded.stream() <del> .filter(c -> factoryByType.containsKey(c.get("componentType"))) <add> .filter(c -> factoryByType.containsKey(c.get(COMPONENT_TYPE))) <ide> .collect(Collectors.toCollection(LinkedList::new)); <ide> <ide> // Remove the intersection of "to be added" and "to be removed" from <ide> for (var confIter = toBeAdded.iterator(); <ide> confIter.hasNext();) { <ide> var config = confIter.next(); <del> var confComp = config.get("componentType"); <del> var confName = config.get("name"); <add> var confComp = config.get(COMPONENT_TYPE); <add> var confName = config.get(COMPONENT_NAME); <ide> if (confComp.equals(childComp) <ide> && Objects.equals(childName, confName)) { <ide> confIter.remove(); <ide> child.detach(); <ide> } <ide> toBeAdded.stream().map(config -> { <del> return factoryByType.get(config.get("componentType")) <add> return factoryByType.get(config.get(COMPONENT_TYPE)) <ide> .create(channel(), config).map( <ide> c -> ComponentFactory.setStandardProperties(c, config)) <ide> .stream();
JavaScript
agpl-3.0
a2338f8e55054e02cfbc3ffadb7671686e0d71d4
0
havt/openerp-web,havt/openerp-web,havt/openerp-web,akretion/openerp-web,havt/openerp-web,akretion/openerp-web,akretion/openerp-web,havt/openerp-web
openerp.base.search = function(openerp) { openerp.base.SearchView = openerp.base.Widget.extend({ init: function(parent, element_id, dataset, view_id, defaults) { this._super(parent, element_id); this.view_manager = parent || new openerp.base.NullViewManager(); this.dataset = dataset; this.model = dataset.model; this.view_id = view_id; this.defaults = defaults || {}; this.inputs = []; this.enabled_filters = []; this.has_focus = false; this.ready = $.Deferred(); }, start: function() { //this.log('Starting SearchView '+this.model+this.view_id) this.rpc("/base/searchview/load", {"model": this.model, "view_id":this.view_id}, this.on_loaded); return this.ready.promise(); }, show: function () { this.$element.show(); }, hide: function () { this.$element.hide(); }, /** * Builds a list of widget rows (each row is an array of widgets) * * @param {Array} items a list of nodes to convert to widgets * @param {Object} fields a mapping of field names to (ORM) field attributes * @returns Array */ make_widgets: function (items, fields) { var rows = [], row = []; rows.push(row); var filters = []; _.each(items, function (item) { if (filters.length && item.tag !== 'filter') { row.push( new openerp.base.search.FilterGroup( filters, this)); filters = []; } if (item.tag === 'newline') { row = []; rows.push(row); } else if (item.tag === 'filter') { if (!this.has_focus) { item.attrs.default_focus = '1'; this.has_focus = true; } filters.push( new openerp.base.search.Filter( item, this)); } else if (item.tag === 'separator') { // a separator is a no-op } else { if (item.tag === 'group') { // TODO: group and field should be fetched from registries, maybe even filters row.push( new openerp.base.search.Group( item, this, fields)); } else if (item.tag === 'field') { if (!this.has_focus) { item.attrs.default_focus = '1'; this.has_focus = true; } row.push( this.make_field( item, fields[item['attrs'].name])); } } }, this); if (filters.length) { row.push(new openerp.base.search.FilterGroup(filters, 
this)); } return rows; }, /** * Creates a field for the provided field descriptor item (which comes * from fields_view_get) * * @param {Object} item fields_view_get node for the field * @param {Object} field fields_get result for the field * @returns openerp.base.search.Field */ make_field: function (item, field) { try { return new (openerp.base.search.fields.get_object(field.type)) (item, field, this); } catch (e) { if (! e instanceof openerp.base.KeyNotFound) { throw e; } // KeyNotFound means unknown field type console.group('Unknown field type ' + field.type); console.error('View node', item); console.info('View field', field); console.info('In view', this); console.groupEnd(); return null; } }, on_loaded: function(data) { var self = this, lines = this.make_widgets( data.fields_view['arch'].children, data.fields_view.fields); // for extended search view var ext = new openerp.base.search.ExtendedSearch(this, this.model); lines.push([ext]); this.inputs.push(ext); var render = QWeb.render("SearchView", { 'view': data.fields_view['arch'], 'lines': lines, 'defaults': this.defaults }); // We don't understand why the following commented line does not work in Chrome but // the non-commented line does. As far as we investigated, only God knows. 
//this.$element.html(render); jQuery(render).appendTo(this.$element); this.$element.find(".oe_search-view-custom-filter-btn").click(ext.on_activate); var f = this.$element.find('form'); this.$element.find('form') .submit(this.do_search) .bind('reset', this.do_clear); // start() all the widgets var widget_starts = _(lines).chain().flatten().map(function (widget) { return widget.start(); }).value(); $.when.apply(null, widget_starts).then(function () { self.ready.resolve(); }); this.reload_managed_filters(); }, reload_managed_filters: function() { var self = this; return this.rpc('/base/searchview/get_filters', { model: this.dataset.model }).then(function(result) { self.managed_filters = result; var filters = self.$element.find(".oe_search-view-filters-management"); filters.html(QWeb.render("SearchView.managed-filters", {filters: result})); filters.change(self.on_filters_management); }); }, /** * Handle event when the user make a selection in the filters management select box. */ on_filters_management: function(e) { var self = this; var select = this.$element.find(".oe_search-view-filters-management"); var val = select.val(); if (val.slice(0,1) == "_") { // useless action select.val("_filters"); return; } if (val.slice(0, "get:".length) == "get:") { val = val.slice("get:".length); val = parseInt(val); var filter = this.managed_filters[val]; this.on_search([filter.domain], [filter.context], []); } else if (val == "save_filter") { select.val("_filters"); var data = this.build_search_data(); var context = new openerp.base.CompoundContext(); _.each(data.contexts, function(x) { context.add(x); }); var domain = new openerp.base.CompoundDomain(); _.each(data.domains, function(x) { domain.add(x); }); var dial_html = QWeb.render("SearchView.managed-filters.add"); var $dial = $(dial_html); $dial.dialog({ modal: true, title: "Filter Entry", buttons: { Cancel: function() { $(this).dialog("close"); }, OK: function() { $(this).dialog("close"); var name = 
$(this).find("input").val(); self.rpc('/base/searchview/save_filter', { model: self.dataset.model, context_to_save: context, domain: domain, name: name }).then(function() { self.reload_managed_filters(); }); } } }); } else { // manage_filters select.val("_filters"); this.do_action({ res_model: 'ir.filters', views: [[false, 'list'], [false, 'form']], type: 'ir.actions.act_window', context: {"search_default_user_id": this.session.uid, "search_default_model_id": this.dataset.model}, target: "current", limit : 80, auto_search : true }); } }, /** * Performs the search view collection of widget data. * * If the collection went well (all fields are valid), then triggers * :js:func:`openerp.base.SearchView.on_search`. * * If at least one field failed its validation, triggers * :js:func:`openerp.base.SearchView.on_invalid` instead. * * @param e jQuery event object coming from the "Search" button */ do_search: function (e) { // reset filters management var select = this.$element.find(".oe_search-view-filters-management"); select.val("_filters"); if (e && e.preventDefault) { e.preventDefault(); } var data = this.build_search_data(); if (data.errors.length) { this.on_invalid(data.errors); return; } this.on_search(data.domains, data.contexts, data.groupbys); }, build_search_data: function() { var domains = [], contexts = [], errors = []; _.each(this.inputs, function (input) { try { var domain = input.get_domain(); if (domain) { domains.push(domain); } var context = input.get_context(); if (context) { contexts.push(context); } } catch (e) { if (e instanceof openerp.base.search.Invalid) { errors.push(e); } else { throw e; } } }); // TODO: do we need to handle *fields* with group_by in their context? 
var groupbys = _(this.enabled_filters) .chain() .map(function (filter) { return filter.get_context();}) .compact() .value(); return {domains: domains, contexts: contexts, errors: errors, groupbys: groupbys}; }, /** * Triggered after the SearchView has collected all relevant domains and * contexts. * * It is provided with an Array of domains and an Array of contexts, which * may or may not be evaluated (each item can be either a valid domain or * context, or a string to evaluate in order in the sequence) * * It is also passed an array of contexts used for group_by (they are in * the correct order for group_by evaluation, which contexts may not be) * * @event * @param {Array} domains an array of literal domains or domain references * @param {Array} contexts an array of literal contexts or context refs * @param {Array} groupbys ordered contexts which may or may not have group_by keys */ on_search: function (domains, contexts, groupbys) { }, /** * Triggered after a validation error in the SearchView fields. * * Error objects have three keys: * * ``field`` is the name of the invalid field * * ``value`` is the invalid value * * ``message`` is the (in)validation message provided by the field * * @event * @param {Array} errors a never-empty array of error objects */ on_invalid: function (errors) { this.notification.notify("Invalid Search", "triggered from search view"); }, do_clear: function (e) { if (e && e.preventDefault) { e.preventDefault(); } this.on_clear(); }, /** * Triggered when the search view gets cleared * * @event */ on_clear: function () { }, /** * Called by a filter propagating its state changes * * @param {openerp.base.search.Filter} filter a filter which got toggled * @param {Boolean} default_enabled filter got enabled through the default values, at render time. 
*/ do_toggle_filter: function (filter, default_enabled) { if (default_enabled || filter.is_enabled()) { this.enabled_filters.push(filter); } else { this.enabled_filters = _.without( this.enabled_filters, filter); } if (!default_enabled) { // selecting a filter after initial loading automatically // triggers refresh this.$element.find('form').submit(); } } }); /** @namespace */ openerp.base.search = {}; /** * Registry of search fields, called by :js:class:`openerp.base.SearchView` to * find and instantiate its field widgets. */ openerp.base.search.fields = new openerp.base.Registry({ 'char': 'openerp.base.search.CharField', 'text': 'openerp.base.search.CharField', 'boolean': 'openerp.base.search.BooleanField', 'integer': 'openerp.base.search.IntegerField', 'float': 'openerp.base.search.FloatField', 'selection': 'openerp.base.search.SelectionField', 'datetime': 'openerp.base.search.DateTimeField', 'date': 'openerp.base.search.DateField', 'one2many': 'openerp.base.search.OneToManyField', 'many2one': 'openerp.base.search.ManyToOneField', 'many2many': 'openerp.base.search.ManyToManyField' }); openerp.base.search.Invalid = openerp.base.Class.extend( /** @lends openerp.base.search.Invalid# */{ /** * Exception thrown by search widgets when they hold invalid values, * which they can not return when asked. 
* * @constructs * @param field the name of the field holding an invalid value * @param value the invalid value * @param message validation failure message */ init: function (field, value, message) { this.field = field; this.value = value; this.message = message; }, toString: function () { return ('Incorrect value for field ' + this.field + ': [' + this.value + '] is ' + this.message); } }); openerp.base.search.Widget = openerp.base.Widget.extend( /** @lends openerp.base.search.Widget# */{ template: null, /** * Root class of all search widgets * * @constructs * @extends openerp.base.Widget * * @param view the ancestor view of this widget */ init: function (view) { this.view = view; }, /** * Sets and returns a globally unique identifier for the widget. * * If a prefix is specified, the identifier will be appended to it. * * @params prefix prefix sections, empty/falsy sections will be removed */ make_id: function () { this.element_id = _.uniqueId( ['search'].concat( _.compact(_.toArray(arguments)), ['']).join('_')); return this.element_id; }, /** * "Starts" the widgets. Called at the end of the rendering, this allows * widgets to hook themselves to their view sections. * * On widgets, if they kept a reference to a view and have an element_id, * will fetch and set their root element on $element. */ start: function () { this._super(); if (this.view && this.element_id) { // id is unique, and no getElementById on elements this.$element = $(document.getElementById( this.element_id)); } }, /** * "Stops" the widgets. Called when the view destroys itself, this * lets the widgets clean up after themselves. 
*/ stop: function () { delete this.view; this._super(); }, render: function (defaults) { return QWeb.render( this.template, _.extend(this, { defaults: defaults })); } }); openerp.base.search.FilterGroup = openerp.base.search.Widget.extend({ template: 'SearchView.filters', init: function (filters, view) { this._super(view); this.filters = filters; this.length = filters.length; }, start: function () { this._super(); _.each(this.filters, function (filter) { filter.start(); }); } }); openerp.base.search.add_expand_listener = function($root) { $root.find('a.searchview_group_string').click(function (e) { $root.toggleClass('folded expanded'); e.stopPropagation(); e.preventDefault(); }); }; openerp.base.search.Group = openerp.base.search.Widget.extend({ template: 'SearchView.group', init: function (view_section, view, fields) { this._super(view); this.attrs = view_section.attrs; this.lines = view.make_widgets( view_section.children, fields); this.make_id('group'); }, start: function () { this._super(); _(this.lines) .chain() .flatten() .each(function (widget) { widget.start(); }); openerp.base.search.add_expand_listener(this.$element); } }); openerp.base.search.Input = openerp.base.search.Widget.extend( /** @lends openerp.base.search.Input# */{ /** * @constructs * @extends openerp.base.search.Widget * * @param view */ init: function (view) { this._super(view); this.view.inputs.push(this); }, get_context: function () { throw new Error( "get_context not implemented for widget " + this.attrs.type); }, get_domain: function () { throw new Error( "get_domain not implemented for widget " + this.attrs.type); } }); openerp.base.search.Filter = openerp.base.search.Input.extend({ template: 'SearchView.filter', init: function (node, view) { this._super(view); this.attrs = node.attrs; this.classes = [this.attrs.string ? 
'filter_label' : 'filter_icon']; this.make_id('filter', this.attrs.name); }, start: function () { this._super(); var self = this; this.$element.click(function (e) { $(this).toggleClass('enabled'); self.view.do_toggle_filter(self); }); }, /** * Returns whether the filter is currently enabled (in use) or not. * * @returns a boolean */ is_enabled:function () { return this.$element.hasClass('enabled'); }, /** * If the filter is present in the defaults (and has a truthy value), * enable the filter. * * @param {Object} defaults the search view's default values */ render: function (defaults) { if (this.attrs.name && defaults[this.attrs.name]) { this.classes.push('enabled'); this.view.do_toggle_filter(this, true); } return this._super(defaults); }, get_context: function () { if (!this.is_enabled()) { return; } return this.attrs.context; }, get_domain: function () { if (!this.is_enabled()) { return; } return this.attrs.domain; } }); openerp.base.search.Field = openerp.base.search.Input.extend( /** @lends openerp.base.search.Field# */ { template: 'SearchView.field', default_operator: '=', /** * @constructs * @extends openerp.base.search.Input * * @param view_section * @param field * @param view */ init: function (view_section, field, view) { this._super(view); this.attrs = _.extend({}, field, view_section.attrs); this.filters = new openerp.base.search.FilterGroup(_.map( view_section.children, function (filter_node) { return new openerp.base.search.Filter( filter_node, view); }), view); this.make_id('input', field.type, this.attrs.name); }, start: function () { this._super(); this.filters.start(); }, get_context: function () { var val = this.get_value(); // A field needs a value to be "active", and a context to send when // active var has_value = (val !== null && val !== ''); var context = this.attrs.context; if (!(has_value && context)) { return; } return _.extend( {}, context, {own_values: {self: val}}); }, get_domain: function () { var val = this.get_value(); if (val === 
null || val === '') { return; } var domain = this.attrs['filter_domain']; if (!domain) { return [[ this.attrs.name, this.attrs.operator || this.default_operator, this.get_value() ]]; } return _.extend({}, domain, {own_values: {self: val}}); } }); /** * Implementation of the ``char`` OpenERP field type: * * * Default operator is ``ilike`` rather than ``=`` * * * The Javascript and the HTML values are identical (strings) * * @class * @extends openerp.base.search.Field */ openerp.base.search.CharField = openerp.base.search.Field.extend( /** @lends openerp.base.search.CharField# */ { default_operator: 'ilike', get_value: function () { return this.$element.val(); } }); openerp.base.search.BooleanField = openerp.base.search.Field.extend({ template: 'SearchView.field.selection', init: function () { this._super.apply(this, arguments); this.attrs.selection = [ ['true', 'Yes'], ['false', 'No'] ]; }, /** * Search defaults likely to be boolean values (for a boolean field). * * In the HTML, we only get strings, and our strings here are * <code>'true'</code> and <code>'false'</code>, so ensure we get only * those by truth-testing the default value. * * @param {Object} defaults default values for this search view */ render: function (defaults) { var name = this.attrs.name; if (name in defaults) { defaults[name] = defaults[name] ? 
"true" : "false"; } return this._super(defaults); }, get_value: function () { switch (this.$element.val()) { case 'false': return false; case 'true': return true; default: return null; } } }); openerp.base.search.NumberField = openerp.base.search.Field.extend(/** @lends openerp.base.search.NumberField# */{ get_value: function () { if (!this.$element.val()) { return null; } var val = this.parse(this.$element.val()), check = Number(this.$element.val()); if (isNaN(val) || val !== check) { this.$element.addClass('error'); throw new openerp.base.search.Invalid( this.attrs.name, this.$element.val(), this.error_message); } this.$element.removeClass('error'); return val; } }); /** * @class * @extends openerp.base.search.NumberField */ openerp.base.search.IntegerField = openerp.base.search.NumberField.extend(/** @lends openerp.base.search.IntegerField# */{ error_message: "not a valid integer", parse: function (value) { return parseInt(value, 10); } }); /** * @class * @extends openerp.base.search.NumberField */ openerp.base.search.FloatField = openerp.base.search.NumberField.extend(/** @lends openerp.base.search.FloatField# */{ error_message: "not a valid number", parse: function (value) { return parseFloat(value); } }); openerp.base.search.SelectionField = openerp.base.search.Field.extend({ template: 'SearchView.field.selection', get_value: function () { return this.$element.val(); } }); openerp.base.search.DateField = openerp.base.search.Field.extend( /** @lends openerp.base.search.DateField# */{ template: 'SearchView.fields.date', /** * enables date picker on the HTML widgets */ start: function () { this._super(); this.$element.find('input').datepicker({ dateFormat: 'yy-mm-dd' }); }, stop: function () { this.$element.find('input').datepicker('destroy'); }, /** * Returns an object with two optional keys ``from`` and ``to`` providing * the values for resp. the from and to sections of the date widget. * * If a key is absent, then the corresponding field was not filled. 
* * @returns {Object} */ get_values: function () { var values_array = this.$element.find('input').serializeArray(); if (!values_array || !values_array[0]) { throw new openerp.base.search.Invalid( this.attrs.name, null, "widget not ready"); } var from = values_array[0].value, to = values_array[1].value; var field_values = {}; if (from) { field_values.from = from; } if (to) { field_values.to = to; } return field_values; }, get_context: function () { var values = this.get_values(); if (!this.attrs.context || _.isEmpty(values)) { return null; } return _.extend( {}, this.attrs.context, {own_values: {self: values}}); }, get_domain: function () { var values = this.get_values(); if (_.isEmpty(values)) { return null; } var domain = this.attrs['filter_domain']; if (!domain) { domain = []; if (values.from) { domain.push([this.attrs.name, '>=', values.from]); } if (values.to) { domain.push([this.attrs.name, '<=', values.to]); } return domain; } return _.extend( {}, domain, {own_values: {self: values}}); } }); openerp.base.search.DateTimeField = openerp.base.search.DateField.extend({ // TODO: time? 
}); openerp.base.search.OneToManyField = openerp.base.search.CharField.extend({ // TODO: .relation, .context, .domain }); openerp.base.search.ManyToOneField = openerp.base.search.CharField.extend({ // TODO: @widget // TODO: .relation, .selection, .context, .domain init: function (view_section, field, view) { this._super(view_section, field, view); var self = this; this.got_name = $.Deferred().then(function () { self.$element.val(self.name); }); this.dataset = new openerp.base.DataSet( this.view, this.attrs['relation']); }, start: function () { this._super(); this.setup_autocomplete(); var started = $.Deferred(); this.got_name.then(function () { started.resolve();}, function () { started.resolve(); }); return started.promise(); }, setup_autocomplete: function () { var self = this; this.$element.autocomplete({ source: function (req, resp) { self.dataset.name_search( req.term, self.attrs.domain, 'ilike', 8, function (data) { resp(_.map(data, function (result) { return {id: result[0], label: result[1]} })); }); }, select: function (event, ui) { self.id = ui.item.id; self.name = ui.item.label; }, delay: 0 }) }, on_name_get: function (name_get) { if (!name_get.length) { delete this.id; this.got_name.reject(); return; } this.name = name_get[0][1]; this.got_name.resolve(); }, render: function (defaults) { if (defaults[this.attrs.name]) { this.id = defaults[this.attrs.name]; // TODO: maybe this should not be completely removed delete defaults[this.attrs.name]; this.dataset.name_get([this.id], $.proxy(this, 'on_name_get')); } else { this.got_name.reject(); } return this._super(defaults); }, get_domain: function () { if (this.id && this.name) { if (this.$element.val() === this.name) { return [[this.attrs.name, '=', this.id]]; } else { delete this.id; delete this.name; } } return this._super(); } }); openerp.base.search.ManyToManyField = openerp.base.search.CharField.extend({ // TODO: .related_columns (Array), .context, .domain }); openerp.base.search.ExtendedSearch = 
openerp.base.OldWidget.extend({ template: 'SearchView.extended_search', identifier_prefix: 'extended-search', init: function (parent, model) { this._super(parent); this.model = model; }, add_group: function() { var group = new openerp.base.search.ExtendedSearchGroup(this, this.fields); group.appendTo(this.$element.find('.searchview_extended_groups_list')); this.check_last_element(); }, start: function () { this._super(); if (!this.$element) { return; // not a logical state but sometimes it happens } this.$element.closest("table.oe-searchview-render-line").css("display", "none"); var self = this; this.rpc("/base/searchview/fields_get", {"model": this.model}, function(data) { self.fields = data.fields; openerp.base.search.add_expand_listener(self.$element); self.$element.find('.searchview_extended_add_group').click(function (e) { self.add_group(); }); }); }, get_context: function() { return null; }, get_domain: function() { if (!this.$element) { return null; // not a logical state but sometimes it happens } if(this.$element.closest("table.oe-searchview-render-line").css("display") == "none") { return null; } return _.reduce(this.widget_children, function(mem, x) { return mem.concat(x.get_domain());}, []); }, on_activate: function() { this.add_group(); var table = this.$element.closest("table.oe-searchview-render-line"); table.css("display", ""); if(this.$element.hasClass("folded")) { this.$element.toggleClass("folded expanded"); } }, hide: function() { var table = this.$element.closest("table.oe-searchview-render-line"); table.css("display", "none"); if(this.$element.hasClass("expanded")) { this.$element.toggleClass("folded expanded"); } }, check_last_element: function() { _.each(this.widget_children, function(x) {x.set_last_group(false);}); if (this.widget_children.length >= 1) { this.widget_children[this.widget_children.length - 1].set_last_group(true); } } }); openerp.base.search.ExtendedSearchGroup = openerp.base.OldWidget.extend({ template: 
'SearchView.extended_search.group', identifier_prefix: 'extended-search-group', init: function (parent, fields) { this._super(parent); this.fields = fields; }, add_prop: function() { var prop = new openerp.base.search.ExtendedSearchProposition(this, this.fields); var render = prop.render({'index': this.widget_children.length - 1}); this.$element.find('.searchview_extended_propositions_list').append(render); prop.start(); }, start: function () { this._super(); var _this = this; this.add_prop(); this.$element.find('.searchview_extended_add_proposition').click(function (e) { _this.add_prop(); }); var delete_btn = this.$element.find('.searchview_extended_delete_group'); delete_btn.click(function (e) { _this.stop(); }); }, get_domain: function() { var props = _(this.widget_children).chain().map(function(x) { return x.get_proposition(); }).compact().value(); var choice = this.$element.find(".searchview_extended_group_choice").val(); var op = choice == "all" ? "&" : "|"; return [].concat(choice == "none" ? ['!'] : [], _.map(_.range(_.max([0,props.length - 1])), function() { return op; }), props); }, stop: function() { var parent = this.widget_parent; if (this.widget_parent.widget_children.length == 1) this.widget_parent.hide(); this._super(); parent.check_last_element(); }, set_last_group: function(is_last) { if(is_last) this.$element.addClass("last_group"); else this.$element.removeClass("last_group"); } }); openerp.base.search.ExtendedSearchProposition = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition', identifier_prefix: 'extended-search-proposition', init: function (parent, fields) { this._super(parent); this.fields = _(fields).chain() .map(function(val, key) { return _.extend({}, val, {'name': key}); }) .sortBy(function(field) {return field.string;}) .value(); this.attrs = {_: _, fields: this.fields, selected: null}; this.value = null; }, start: function () { this._super(); this.select_field(this.fields.length > 0 ? 
this.fields[0] : null); var _this = this; this.$element.find(".searchview_extended_prop_field").change(function() { _this.changed(); }); var delete_btn = this.$element.find('.searchview_extended_delete_prop'); delete_btn.click(function (e) { _this.stop(); }); }, stop: function() { var parent; if (this.widget_parent.widget_children.length == 1) parent = this.widget_parent; this._super(); if (parent) parent.stop(); }, changed: function() { var nval = this.$element.find(".searchview_extended_prop_field").val(); if(this.attrs.selected == null || nval != this.attrs.selected.name) { this.select_field(_.detect(this.fields, function(x) {return x.name == nval;})); } }, /** * Selects the provided field object * * @param field a field descriptor object (as returned by fields_get, augmented by the field name) */ select_field: function(field) { var _this = this; if(this.attrs.selected != null) { this.value.stop(); this.value = null; this.$element.find('.searchview_extended_prop_op').html(''); } this.attrs.selected = field; if(field == null) { return; } var type = field.type; try { openerp.base.search.custom_filters.get_object(type); } catch (e) { if (! 
e instanceof openerp.base.KeyNotFound) { throw e; } type = "char"; this.log('Unknow field type ' + e.key); } this.value = new (openerp.base.search.custom_filters.get_object(type)) (this); if(this.value.set_field) { this.value.set_field(field); } _.each(this.value.operators, function(operator) { var option = jQuery('<option>', {value: operator.value}) .text(operator.text) .appendTo(_this.$element.find('.searchview_extended_prop_op')); }); this.$element.find('.searchview_extended_prop_value').html( this.value.render({})); this.value.start(); }, get_proposition: function() { if ( this.attrs.selected == null) return null; var field = this.attrs.selected.name; var op = this.$element.find('.searchview_extended_prop_op').val(); var value = this.value.get_value(); return [field, op, value]; } }); openerp.base.search.ExtendedSearchProposition.Char = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.char', identifier_prefix: 'extended-search-proposition-char', operators: [ {value: "ilike", text: "contains"}, {value: "not ilike", text: "doesn't contain"}, {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { return this.$element.val(); } }); openerp.base.search.ExtendedSearchProposition.DateTime = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.datetime', identifier_prefix: 'extended-search-proposition-datetime', operators: [ {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { return this.$element.val(); }, start: function() { this._super(); this.$element.datetimepicker({ dateFormat: 'yy-mm-dd', timeFormat: 
'hh:mm:ss' }); } }); openerp.base.search.ExtendedSearchProposition.Date = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.date', identifier_prefix: 'extended-search-proposition-date', operators: [ {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { return this.$element.val(); }, start: function() { this._super(); this.$element.datepicker({ dateFormat: 'yy-mm-dd', timeFormat: 'hh:mm:ss' }); } }); openerp.base.search.ExtendedSearchProposition.Integer = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.integer', identifier_prefix: 'extended-search-proposition-integer', operators: [ {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { var value = parseFloat(this.$element.val()); if(value != 0 && !value) { return ""; } return Math.round(value); } }); openerp.base.search.ExtendedSearchProposition.Float = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.float', identifier_prefix: 'extended-search-proposition-float', operators: [ {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { var value = parseFloat(this.$element.val()); if(value != 0 && !value) { return ""; } return value; } }); openerp.base.search.ExtendedSearchProposition.Selection = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.selection', identifier_prefix: 
'extended-search-proposition-selection', operators: [ {value: "=", text: "is"}, {value: "!=", text: "is not"} ], set_field: function(field) { this.field = field; }, get_value: function() { return this.$element.val(); } }); openerp.base.search.ExtendedSearchProposition.Boolean = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.boolean', identifier_prefix: 'extended-search-proposition-boolean', operators: [ {value: "=", text: "is true"}, {value: "!=", text: "is false"} ], get_value: function() { return true; } }); openerp.base.search.custom_filters = new openerp.base.Registry({ 'char': 'openerp.base.search.ExtendedSearchProposition.Char', 'text': 'openerp.base.search.ExtendedSearchProposition.Char', 'one2many': 'openerp.base.search.ExtendedSearchProposition.Char', 'many2one': 'openerp.base.search.ExtendedSearchProposition.Char', 'many2many': 'openerp.base.search.ExtendedSearchProposition.Char', 'datetime': 'openerp.base.search.ExtendedSearchProposition.DateTime', 'date': 'openerp.base.search.ExtendedSearchProposition.Date', 'integer': 'openerp.base.search.ExtendedSearchProposition.Integer', 'float': 'openerp.base.search.ExtendedSearchProposition.Float', 'boolean': 'openerp.base.search.ExtendedSearchProposition.Boolean', 'selection': 'openerp.base.search.ExtendedSearchProposition.Selection' }); }; // vim:et fdc=0 fdl=0 foldnestmax=3 fdm=syntax:
addons/base/static/src/js/search.js
openerp.base.search = function(openerp) { openerp.base.SearchView = openerp.base.Widget.extend({ init: function(parent, element_id, dataset, view_id, defaults) { this._super(parent, element_id); this.view_manager = parent || new openerp.base.NullViewManager(); this.dataset = dataset; this.model = dataset.model; this.view_id = view_id; this.defaults = defaults || {}; this.inputs = []; this.enabled_filters = []; this.has_focus = false; this.ready = $.Deferred(); }, start: function() { //this.log('Starting SearchView '+this.model+this.view_id) this.rpc("/base/searchview/load", {"model": this.model, "view_id":this.view_id}, this.on_loaded); return this.ready.promise(); }, show: function () { this.$element.show(); }, hide: function () { this.$element.hide(); }, /** * Builds a list of widget rows (each row is an array of widgets) * * @param {Array} items a list of nodes to convert to widgets * @param {Object} fields a mapping of field names to (ORM) field attributes * @returns Array */ make_widgets: function (items, fields) { var rows = [], row = []; rows.push(row); var filters = []; _.each(items, function (item) { if (filters.length && item.tag !== 'filter') { row.push( new openerp.base.search.FilterGroup( filters, this)); filters = []; } if (item.tag === 'newline') { row = []; rows.push(row); } else if (item.tag === 'filter') { if (!this.has_focus) { item.attrs.default_focus = '1'; this.has_focus = true; } filters.push( new openerp.base.search.Filter( item, this)); } else if (item.tag === 'separator') { // a separator is a no-op } else { if (item.tag === 'group') { // TODO: group and field should be fetched from registries, maybe even filters row.push( new openerp.base.search.Group( item, this, fields)); } else if (item.tag === 'field') { if (!this.has_focus) { item.attrs.default_focus = '1'; this.has_focus = true; } row.push( this.make_field( item, fields[item['attrs'].name])); } } }, this); if (filters.length) { row.push(new openerp.base.search.FilterGroup(filters, 
this)); } return rows; }, /** * Creates a field for the provided field descriptor item (which comes * from fields_view_get) * * @param {Object} item fields_view_get node for the field * @param {Object} field fields_get result for the field * @returns openerp.base.search.Field */ make_field: function (item, field) { try { return new (openerp.base.search.fields.get_object(field.type)) (item, field, this); } catch (e) { if (! e instanceof openerp.base.KeyNotFound) { throw e; } // KeyNotFound means unknown field type console.group('Unknown field type ' + field.type); console.error('View node', item); console.info('View field', field); console.info('In view', this); console.groupEnd(); return null; } }, on_loaded: function(data) { var self = this, lines = this.make_widgets( data.fields_view['arch'].children, data.fields_view.fields); // for extended search view var ext = new openerp.base.search.ExtendedSearch(this, this.model); lines.push([ext]); this.inputs.push(ext); var render = QWeb.render("SearchView", { 'view': data.fields_view['arch'], 'lines': lines, 'defaults': this.defaults }); // We don't understand why the following commented line does not work in Chrome but // the non-commented line does. As far as we investigated, only God knows. 
//this.$element.html(render); jQuery(render).appendTo(this.$element); this.$element.find(".oe_search-view-custom-filter-btn").click(ext.on_activate); var f = this.$element.find('form'); this.$element.find('form') .submit(this.do_search) .bind('reset', this.do_clear); // start() all the widgets var widget_starts = _(lines).chain().flatten().map(function (widget) { return widget.start(); }).value(); $.when.apply(null, widget_starts).then(function () { self.ready.resolve(); }); this.reload_managed_filters(); }, reload_managed_filters: function() { var self = this; return this.rpc('/base/searchview/get_filters', { model: this.dataset.model }).then(function(result) { self.managed_filters = result; var filters = self.$element.find(".oe_search-view-filters-management"); filters.html(QWeb.render("SearchView.managed-filters", {filters: result})); filters.change(self.on_filters_management); }); }, /** * Handle event when the user make a selection in the filters management select box. */ on_filters_management: function(e) { var self = this; var select = this.$element.find(".oe_search-view-filters-management"); var val = select.val(); if (val.slice(0,1) == "_") { // useless action select.val("_filters"); return; } if (val.slice(0, "get:".length) == "get:") { val = val.slice("get:".length); val = parseInt(val); var filter = this.managed_filters[val]; this.on_search([filter.domain], [filter.context], []); } else if (val == "save_filter") { select.val("_filters"); var data = this.build_search_data(); var context = new openerp.base.CompoundContext(); _.each(data.contexts, function(x) { context.add(x); }); var domain = new openerp.base.CompoundDomain(); _.each(data.domains, function(x) { domain.add(x); }); var dial_html = QWeb.render("SearchView.managed-filters.add"); var $dial = $(dial_html); $dial.dialog({ modal: true, title: "Filter Entry", buttons: { Cancel: function() { $(this).dialog("close"); }, OK: function() { $(this).dialog("close"); var name = 
$(this).find("input").val(); self.rpc('/base/searchview/save_filter', { model: self.dataset.model, context_to_save: context, domain: domain, name: name }).then(function(result) { self.reload_managed_filters(); }); }, } }); } else { // manage_filters select.val("_filters"); this.do_action({ res_model: 'ir.filters', views: [[false, 'list'], [false, 'form']], type: 'ir.actions.act_window', context: {"search_default_user_id": this.session.uid, "search_default_model_id": this.dataset.model}, target: "current", limit : 80, auto_search : true }); } }, /** * Performs the search view collection of widget data. * * If the collection went well (all fields are valid), then triggers * :js:func:`openerp.base.SearchView.on_search`. * * If at least one field failed its validation, triggers * :js:func:`openerp.base.SearchView.on_invalid` instead. * * @param e jQuery event object coming from the "Search" button */ do_search: function (e) { // reset filters management var select = this.$element.find(".oe_search-view-filters-management"); select.val("_filters"); if (e && e.preventDefault) { e.preventDefault(); } var data = this.build_search_data(); if (data.errors.length) { this.on_invalid(data.errors); return; } this.on_search(data.domains, data.contexts, data.groupbys); }, build_search_data: function() { var domains = [], contexts = [], errors = []; _.each(this.inputs, function (input) { try { var domain = input.get_domain(); if (domain) { domains.push(domain); } var context = input.get_context(); if (context) { contexts.push(context); } } catch (e) { if (e instanceof openerp.base.search.Invalid) { errors.push(e); } else { throw e; } } }); // TODO: do we need to handle *fields* with group_by in their context? 
var groupbys = _(this.enabled_filters) .chain() .map(function (filter) { return filter.get_context();}) .compact() .value(); return {domains: domains, contexts: contexts, errors: errors, groupbys: groupbys}; }, /** * Triggered after the SearchView has collected all relevant domains and * contexts. * * It is provided with an Array of domains and an Array of contexts, which * may or may not be evaluated (each item can be either a valid domain or * context, or a string to evaluate in order in the sequence) * * It is also passed an array of contexts used for group_by (they are in * the correct order for group_by evaluation, which contexts may not be) * * @event * @param {Array} domains an array of literal domains or domain references * @param {Array} contexts an array of literal contexts or context refs * @param {Array} groupbys ordered contexts which may or may not have group_by keys */ on_search: function (domains, contexts, groupbys) { }, /** * Triggered after a validation error in the SearchView fields. * * Error objects have three keys: * * ``field`` is the name of the invalid field * * ``value`` is the invalid value * * ``message`` is the (in)validation message provided by the field * * @event * @param {Array} errors a never-empty array of error objects */ on_invalid: function (errors) { this.notification.notify("Invalid Search", "triggered from search view"); }, do_clear: function (e) { if (e && e.preventDefault) { e.preventDefault(); } this.on_clear(); }, /** * Triggered when the search view gets cleared * * @event */ on_clear: function () { }, /** * Called by a filter propagating its state changes * * @param {openerp.base.search.Filter} filter a filter which got toggled * @param {Boolean} default_enabled filter got enabled through the default values, at render time. 
*/ do_toggle_filter: function (filter, default_enabled) { if (default_enabled || filter.is_enabled()) { this.enabled_filters.push(filter); } else { this.enabled_filters = _.without( this.enabled_filters, filter); } if (!default_enabled) { // selecting a filter after initial loading automatically // triggers refresh this.$element.find('form').submit(); } } }); /** @namespace */ openerp.base.search = {}; /** * Registry of search fields, called by :js:class:`openerp.base.SearchView` to * find and instantiate its field widgets. */ openerp.base.search.fields = new openerp.base.Registry({ 'char': 'openerp.base.search.CharField', 'text': 'openerp.base.search.CharField', 'boolean': 'openerp.base.search.BooleanField', 'integer': 'openerp.base.search.IntegerField', 'float': 'openerp.base.search.FloatField', 'selection': 'openerp.base.search.SelectionField', 'datetime': 'openerp.base.search.DateTimeField', 'date': 'openerp.base.search.DateField', 'one2many': 'openerp.base.search.OneToManyField', 'many2one': 'openerp.base.search.ManyToOneField', 'many2many': 'openerp.base.search.ManyToManyField' }); openerp.base.search.Invalid = openerp.base.Class.extend( /** @lends openerp.base.search.Invalid# */{ /** * Exception thrown by search widgets when they hold invalid values, * which they can not return when asked. 
* * @constructs * @param field the name of the field holding an invalid value * @param value the invalid value * @param message validation failure message */ init: function (field, value, message) { this.field = field; this.value = value; this.message = message; }, toString: function () { return ('Incorrect value for field ' + this.field + ': [' + this.value + '] is ' + this.message); } }); openerp.base.search.Widget = openerp.base.Widget.extend( /** @lends openerp.base.search.Widget# */{ template: null, /** * Root class of all search widgets * * @constructs * @extends openerp.base.Widget * * @param view the ancestor view of this widget */ init: function (view) { this.view = view; }, /** * Sets and returns a globally unique identifier for the widget. * * If a prefix is specified, the identifier will be appended to it. * * @params prefix prefix sections, empty/falsy sections will be removed */ make_id: function () { this.element_id = _.uniqueId( ['search'].concat( _.compact(_.toArray(arguments)), ['']).join('_')); return this.element_id; }, /** * "Starts" the widgets. Called at the end of the rendering, this allows * widgets to hook themselves to their view sections. * * On widgets, if they kept a reference to a view and have an element_id, * will fetch and set their root element on $element. */ start: function () { this._super(); if (this.view && this.element_id) { // id is unique, and no getElementById on elements this.$element = $(document.getElementById( this.element_id)); } }, /** * "Stops" the widgets. Called when the view destroys itself, this * lets the widgets clean up after themselves. 
*/ stop: function () { delete this.view; this._super(); }, render: function (defaults) { return QWeb.render( this.template, _.extend(this, { defaults: defaults })); } }); openerp.base.search.FilterGroup = openerp.base.search.Widget.extend({ template: 'SearchView.filters', init: function (filters, view) { this._super(view); this.filters = filters; this.length = filters.length; }, start: function () { this._super(); _.each(this.filters, function (filter) { filter.start(); }); } }); openerp.base.search.add_expand_listener = function($root) { $root.find('a.searchview_group_string').click(function (e) { $root.toggleClass('folded expanded'); e.stopPropagation(); e.preventDefault(); }); }; openerp.base.search.Group = openerp.base.search.Widget.extend({ template: 'SearchView.group', init: function (view_section, view, fields) { this._super(view); this.attrs = view_section.attrs; this.lines = view.make_widgets( view_section.children, fields); this.make_id('group'); }, start: function () { this._super(); _(this.lines) .chain() .flatten() .each(function (widget) { widget.start(); }); openerp.base.search.add_expand_listener(this.$element); } }); openerp.base.search.Input = openerp.base.search.Widget.extend( /** @lends openerp.base.search.Input# */{ /** * @constructs * @extends openerp.base.search.Widget * * @param view */ init: function (view) { this._super(view); this.view.inputs.push(this); }, get_context: function () { throw new Error( "get_context not implemented for widget " + this.attrs.type); }, get_domain: function () { throw new Error( "get_domain not implemented for widget " + this.attrs.type); } }); openerp.base.search.Filter = openerp.base.search.Input.extend({ template: 'SearchView.filter', init: function (node, view) { this._super(view); this.attrs = node.attrs; this.classes = [this.attrs.string ? 
'filter_label' : 'filter_icon']; this.make_id('filter', this.attrs.name); }, start: function () { this._super(); var self = this; this.$element.click(function (e) { $(this).toggleClass('enabled'); self.view.do_toggle_filter(self); }); }, /** * Returns whether the filter is currently enabled (in use) or not. * * @returns a boolean */ is_enabled:function () { return this.$element.hasClass('enabled'); }, /** * If the filter is present in the defaults (and has a truthy value), * enable the filter. * * @param {Object} defaults the search view's default values */ render: function (defaults) { if (this.attrs.name && defaults[this.attrs.name]) { this.classes.push('enabled'); this.view.do_toggle_filter(this, true); } return this._super(defaults); }, get_context: function () { if (!this.is_enabled()) { return; } return this.attrs.context; }, get_domain: function () { if (!this.is_enabled()) { return; } return this.attrs.domain; } }); openerp.base.search.Field = openerp.base.search.Input.extend( /** @lends openerp.base.search.Field# */ { template: 'SearchView.field', default_operator: '=', /** * @constructs * @extends openerp.base.search.Input * * @param view_section * @param field * @param view */ init: function (view_section, field, view) { this._super(view); this.attrs = _.extend({}, field, view_section.attrs); this.filters = new openerp.base.search.FilterGroup(_.map( view_section.children, function (filter_node) { return new openerp.base.search.Filter( filter_node, view); }), view); this.make_id('input', field.type, this.attrs.name); }, start: function () { this._super(); this.filters.start(); }, get_context: function () { var val = this.get_value(); // A field needs a value to be "active", and a context to send when // active var has_value = (val !== null && val !== ''); var context = this.attrs.context; if (!(has_value && context)) { return; } return _.extend( {}, context, {own_values: {self: val}}); }, get_domain: function () { var val = this.get_value(); if (val === 
null || val === '') { return; } var domain = this.attrs['filter_domain']; if (!domain) { return [[ this.attrs.name, this.attrs.operator || this.default_operator, this.get_value() ]]; } return _.extend({}, domain, {own_values: {self: val}}); } }); /** * Implementation of the ``char`` OpenERP field type: * * * Default operator is ``ilike`` rather than ``=`` * * * The Javascript and the HTML values are identical (strings) * * @class * @extends openerp.base.search.Field */ openerp.base.search.CharField = openerp.base.search.Field.extend( /** @lends openerp.base.search.CharField# */ { default_operator: 'ilike', get_value: function () { return this.$element.val(); } }); openerp.base.search.BooleanField = openerp.base.search.Field.extend({ template: 'SearchView.field.selection', init: function () { this._super.apply(this, arguments); this.attrs.selection = [ ['true', 'Yes'], ['false', 'No'] ]; }, /** * Search defaults likely to be boolean values (for a boolean field). * * In the HTML, we only get strings, and our strings here are * <code>'true'</code> and <code>'false'</code>, so ensure we get only * those by truth-testing the default value. * * @param {Object} defaults default values for this search view */ render: function (defaults) { var name = this.attrs.name; if (name in defaults) { defaults[name] = defaults[name] ? 
"true" : "false"; } return this._super(defaults); }, get_value: function () { switch (this.$element.val()) { case 'false': return false; case 'true': return true; default: return null; } } }); openerp.base.search.NumberField = openerp.base.search.Field.extend(/** @lends openerp.base.search.NumberField# */{ get_value: function () { if (!this.$element.val()) { return null; } var val = this.parse(this.$element.val()), check = Number(this.$element.val()); if (isNaN(val) || val !== check) { this.$element.addClass('error'); throw new openerp.base.search.Invalid( this.attrs.name, this.$element.val(), this.error_message); } this.$element.removeClass('error'); return val; } }); /** * @class * @extends openerp.base.search.NumberField */ openerp.base.search.IntegerField = openerp.base.search.NumberField.extend(/** @lends openerp.base.search.IntegerField# */{ error_message: "not a valid integer", parse: function (value) { return parseInt(value, 10); } }); /** * @class * @extends openerp.base.search.NumberField */ openerp.base.search.FloatField = openerp.base.search.NumberField.extend(/** @lends openerp.base.search.FloatField# */{ error_message: "not a valid number", parse: function (value) { return parseFloat(value); } }); openerp.base.search.SelectionField = openerp.base.search.Field.extend({ template: 'SearchView.field.selection', get_value: function () { return this.$element.val(); } }); openerp.base.search.DateField = openerp.base.search.Field.extend( /** @lends openerp.base.search.DateField# */{ template: 'SearchView.fields.date', /** * enables date picker on the HTML widgets */ start: function () { this._super(); this.$element.find('input').datepicker({ dateFormat: 'yy-mm-dd' }); }, stop: function () { this.$element.find('input').datepicker('destroy'); }, /** * Returns an object with two optional keys ``from`` and ``to`` providing * the values for resp. the from and to sections of the date widget. * * If a key is absent, then the corresponding field was not filled. 
* * @returns {Object} */ get_values: function () { var values_array = this.$element.find('input').serializeArray(); if (!values_array || !values_array[0]) { throw new openerp.base.search.Invalid( this.attrs.name, null, "widget not ready"); } var from = values_array[0].value, to = values_array[1].value; var field_values = {}; if (from) { field_values.from = from; } if (to) { field_values.to = to; } return field_values; }, get_context: function () { var values = this.get_values(); if (!this.attrs.context || _.isEmpty(values)) { return null; } return _.extend( {}, this.attrs.context, {own_values: {self: values}}); }, get_domain: function () { var values = this.get_values(); if (_.isEmpty(values)) { return null; } var domain = this.attrs['filter_domain']; if (!domain) { domain = []; if (values.from) { domain.push([this.attrs.name, '>=', values.from]); } if (values.to) { domain.push([this.attrs.name, '<=', values.to]); } return domain; } return _.extend( {}, domain, {own_values: {self: values}}); } }); openerp.base.search.DateTimeField = openerp.base.search.DateField.extend({ // TODO: time? 
}); openerp.base.search.OneToManyField = openerp.base.search.CharField.extend({ // TODO: .relation, .context, .domain }); openerp.base.search.ManyToOneField = openerp.base.search.CharField.extend({ // TODO: @widget // TODO: .relation, .selection, .context, .domain init: function (view_section, field, view) { this._super(view_section, field, view); var self = this; this.got_name = $.Deferred().then(function () { self.$element.val(self.name); }); this.dataset = new openerp.base.DataSet( this.view, this.attrs['relation']); }, start: function () { this._super(); this.setup_autocomplete(); var started = $.Deferred(); this.got_name.then(function () { started.resolve();}, function () { started.resolve(); }); return started.promise(); }, setup_autocomplete: function () { var self = this; this.$element.autocomplete({ source: function (req, resp) { self.dataset.name_search( req.term, self.attrs.domain, 'ilike', 8, function (data) { resp(_.map(data, function (result) { return {id: result[0], label: result[1]} })); }); }, select: function (event, ui) { self.id = ui.item.id; self.name = ui.item.label; }, delay: 0 }) }, on_name_get: function (name_get) { if (!name_get.length) { delete this.id; this.got_name.reject(); return; } this.name = name_get[0][1]; this.got_name.resolve(); }, render: function (defaults) { if (defaults[this.attrs.name]) { this.id = defaults[this.attrs.name]; // TODO: maybe this should not be completely removed delete defaults[this.attrs.name]; this.dataset.name_get([this.id], $.proxy(this, 'on_name_get')); } else { this.got_name.reject(); } return this._super(defaults); }, get_domain: function () { if (this.id && this.name) { if (this.$element.val() === this.name) { return [[this.attrs.name, '=', this.id]]; } else { delete this.id; delete this.name; } } return this._super(); } }); openerp.base.search.ManyToManyField = openerp.base.search.CharField.extend({ // TODO: .related_columns (Array), .context, .domain }); openerp.base.search.ExtendedSearch = 
openerp.base.OldWidget.extend({ template: 'SearchView.extended_search', identifier_prefix: 'extended-search', init: function (parent, model) { this._super(parent); this.model = model; }, add_group: function() { var group = new openerp.base.search.ExtendedSearchGroup(this, this.fields); group.appendTo(this.$element.find('.searchview_extended_groups_list')); this.check_last_element(); }, start: function () { this._super(); if (!this.$element) { return; // not a logical state but sometimes it happens } this.$element.closest("table.oe-searchview-render-line").css("display", "none"); var self = this; this.rpc("/base/searchview/fields_get", {"model": this.model}, function(data) { self.fields = data.fields; openerp.base.search.add_expand_listener(self.$element); self.$element.find('.searchview_extended_add_group').click(function (e) { self.add_group(); }); }); }, get_context: function() { return null; }, get_domain: function() { if (!this.$element) { return null; // not a logical state but sometimes it happens } if(this.$element.closest("table.oe-searchview-render-line").css("display") == "none") { return null; } return _.reduce(this.widget_children, function(mem, x) { return mem.concat(x.get_domain());}, []); }, on_activate: function() { this.add_group(); var table = this.$element.closest("table.oe-searchview-render-line"); table.css("display", ""); if(this.$element.hasClass("folded")) { this.$element.toggleClass("folded expanded"); } }, hide: function() { var table = this.$element.closest("table.oe-searchview-render-line"); table.css("display", "none"); if(this.$element.hasClass("expanded")) { this.$element.toggleClass("folded expanded"); } }, check_last_element: function() { _.each(this.widget_children, function(x) {x.set_last_group(false);}); if (this.widget_children.length >= 1) { this.widget_children[this.widget_children.length - 1].set_last_group(true); } } }); openerp.base.search.ExtendedSearchGroup = openerp.base.OldWidget.extend({ template: 
'SearchView.extended_search.group', identifier_prefix: 'extended-search-group', init: function (parent, fields) { this._super(parent); this.fields = fields; }, add_prop: function() { var prop = new openerp.base.search.ExtendedSearchProposition(this, this.fields); var render = prop.render({'index': this.widget_children.length - 1}); this.$element.find('.searchview_extended_propositions_list').append(render); prop.start(); }, start: function () { this._super(); var _this = this; this.add_prop(); this.$element.find('.searchview_extended_add_proposition').click(function (e) { _this.add_prop(); }); var delete_btn = this.$element.find('.searchview_extended_delete_group'); delete_btn.click(function (e) { _this.stop(); }); }, get_domain: function() { var props = _(this.widget_children).chain().map(function(x) { return x.get_proposition(); }).compact().value(); var choice = this.$element.find(".searchview_extended_group_choice").val(); var op = choice == "all" ? "&" : "|"; return [].concat(choice == "none" ? ['!'] : [], _.map(_.range(_.max([0,props.length - 1])), function() { return op; }), props); }, stop: function() { var parent = this.widget_parent; if (this.widget_parent.widget_children.length == 1) this.widget_parent.hide(); this._super(); parent.check_last_element(); }, set_last_group: function(is_last) { if(is_last) this.$element.addClass("last_group"); else this.$element.removeClass("last_group"); } }); openerp.base.search.ExtendedSearchProposition = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition', identifier_prefix: 'extended-search-proposition', init: function (parent, fields) { this._super(parent); this.fields = _(fields).chain() .map(function(val, key) { return _.extend({}, val, {'name': key}); }) .sortBy(function(field) {return field.string;}) .value(); this.attrs = {_: _, fields: this.fields, selected: null}; this.value = null; }, start: function () { this._super(); this.select_field(this.fields.length > 0 ? 
this.fields[0] : null); var _this = this; this.$element.find(".searchview_extended_prop_field").change(function() { _this.changed(); }); var delete_btn = this.$element.find('.searchview_extended_delete_prop'); delete_btn.click(function (e) { _this.stop(); }); }, stop: function() { var parent; if (this.widget_parent.widget_children.length == 1) parent = this.widget_parent; this._super(); if (parent) parent.stop(); }, changed: function() { var nval = this.$element.find(".searchview_extended_prop_field").val(); if(this.attrs.selected == null || nval != this.attrs.selected.name) { this.select_field(_.detect(this.fields, function(x) {return x.name == nval;})); } }, /** * Selects the provided field object * * @param field a field descriptor object (as returned by fields_get, augmented by the field name) */ select_field: function(field) { var _this = this; if(this.attrs.selected != null) { this.value.stop(); this.value = null; this.$element.find('.searchview_extended_prop_op').html(''); } this.attrs.selected = field; if(field == null) { return; } var type = field.type; try { openerp.base.search.custom_filters.get_object(type); } catch (e) { if (! 
e instanceof openerp.base.KeyNotFound) { throw e; } type = "char"; this.log('Unknow field type ' + e.key); } this.value = new (openerp.base.search.custom_filters.get_object(type)) (this); if(this.value.set_field) { this.value.set_field(field); } _.each(this.value.operators, function(operator) { var option = jQuery('<option>', {value: operator.value}) .text(operator.text) .appendTo(_this.$element.find('.searchview_extended_prop_op')); }); this.$element.find('.searchview_extended_prop_value').html( this.value.render({})); this.value.start(); }, get_proposition: function() { if ( this.attrs.selected == null) return null; var field = this.attrs.selected.name; var op = this.$element.find('.searchview_extended_prop_op').val(); var value = this.value.get_value(); return [field, op, value]; } }); openerp.base.search.ExtendedSearchProposition.Char = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.char', identifier_prefix: 'extended-search-proposition-char', operators: [ {value: "ilike", text: "contains"}, {value: "not ilike", text: "doesn't contain"}, {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { return this.$element.val(); } }); openerp.base.search.ExtendedSearchProposition.DateTime = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.datetime', identifier_prefix: 'extended-search-proposition-datetime', operators: [ {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { return this.$element.val(); }, start: function() { this._super(); this.$element.datetimepicker({ dateFormat: 'yy-mm-dd', timeFormat: 
'hh:mm:ss' }); } }); openerp.base.search.ExtendedSearchProposition.Date = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.date', identifier_prefix: 'extended-search-proposition-date', operators: [ {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { return this.$element.val(); }, start: function() { this._super(); this.$element.datepicker({ dateFormat: 'yy-mm-dd', timeFormat: 'hh:mm:ss' }); } }); openerp.base.search.ExtendedSearchProposition.Integer = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.integer', identifier_prefix: 'extended-search-proposition-integer', operators: [ {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { var value = parseFloat(this.$element.val()); if(value != 0 && !value) { return ""; } return Math.round(value); } }); openerp.base.search.ExtendedSearchProposition.Float = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.float', identifier_prefix: 'extended-search-proposition-float', operators: [ {value: "=", text: "is equal to"}, {value: "!=", text: "is not equal to"}, {value: ">", text: "greater than"}, {value: "<", text: "less than"}, {value: ">=", text: "greater or equal than"}, {value: "<=", text: "less or equal than"} ], get_value: function() { var value = parseFloat(this.$element.val()); if(value != 0 && !value) { return ""; } return value; } }); openerp.base.search.ExtendedSearchProposition.Selection = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.selection', identifier_prefix: 
'extended-search-proposition-selection', operators: [ {value: "=", text: "is"}, {value: "!=", text: "is not"} ], set_field: function(field) { this.field = field; }, get_value: function() { return this.$element.val(); } }); openerp.base.search.ExtendedSearchProposition.Boolean = openerp.base.OldWidget.extend({ template: 'SearchView.extended_search.proposition.boolean', identifier_prefix: 'extended-search-proposition-boolean', operators: [ {value: "=", text: "is true"}, {value: "!=", text: "is false"} ], get_value: function() { return true; } }); openerp.base.search.custom_filters = new openerp.base.Registry({ 'char': 'openerp.base.search.ExtendedSearchProposition.Char', 'text': 'openerp.base.search.ExtendedSearchProposition.Char', 'one2many': 'openerp.base.search.ExtendedSearchProposition.Char', 'many2one': 'openerp.base.search.ExtendedSearchProposition.Char', 'many2many': 'openerp.base.search.ExtendedSearchProposition.Char', 'datetime': 'openerp.base.search.ExtendedSearchProposition.DateTime', 'date': 'openerp.base.search.ExtendedSearchProposition.Date', 'integer': 'openerp.base.search.ExtendedSearchProposition.Integer', 'float': 'openerp.base.search.ExtendedSearchProposition.Float', 'boolean': 'openerp.base.search.ExtendedSearchProposition.Boolean', 'selection': 'openerp.base.search.ExtendedSearchProposition.Selection' }); }; // vim:et fdc=0 fdl=0 foldnestmax=3 fdm=syntax:
[REM] trailing comma in object literal
addons/base/static/src/js/search.js
[REM] trailing comma in object literal
<ide><path>ddons/base/static/src/js/search.js <ide> context_to_save: context, <ide> domain: domain, <ide> name: name <del> }).then(function(result) { <add> }).then(function() { <ide> self.reload_managed_filters(); <ide> }); <del> }, <add> } <ide> } <ide> }); <ide> } else { // manage_filters
Java
apache-2.0
fe2b634dc6330f5775f797da3a600f661c576ad0
0
kickstarter/android-oss,kickstarter/android-oss,kickstarter/android-oss,kickstarter/android-oss
package com.kickstarter.ui.views; import android.content.Context; import android.content.Intent; import android.support.v7.widget.PopupMenu; import android.support.v7.widget.Toolbar; import android.util.AttributeSet; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Spinner; import android.widget.TextView; import com.kickstarter.KSApplication; import com.kickstarter.R; import com.kickstarter.libs.CurrentUser; import com.kickstarter.libs.Logout; import com.kickstarter.models.User; import com.kickstarter.ui.activities.ActivityFeedActivity; import com.kickstarter.ui.activities.DiscoveryActivity; import com.kickstarter.ui.activities.LoginToutActivity; import javax.inject.Inject; import butterknife.Bind; import butterknife.ButterKnife; import rx.Subscription; import rx.android.schedulers.AndroidSchedulers; public class DiscoveryToolbar extends Toolbar { @Bind(R.id.activity_feed_button) TextView activityFeedButton; @Bind(R.id.category_spinner) Spinner categorySpinner; @Bind(R.id.current_user_button) TextView currentUserButton; @Bind(R.id.login_button) TextView loginButton; @Bind(R.id.toolbar) Toolbar toolbar; @Inject CurrentUser currentUser; @Inject Logout logout; Subscription loginSubscription; public DiscoveryToolbar(final Context context) { super(context); } public DiscoveryToolbar(final Context context, final AttributeSet attrs) { super(context, attrs); } public DiscoveryToolbar(final Context context, final AttributeSet attrs, final int defStyleAttr) { super(context, attrs, defStyleAttr); } @Override protected void onFinishInflate() { super.onFinishInflate(); if (isInEditMode()) { return; } ButterKnife.bind(this); ((KSApplication) getContext().getApplicationContext()).component().inject(this); initializeCategorySpinner(); activityFeedButton.setOnClickListener(v -> getContext() .startActivity(new Intent(getContext(), ActivityFeedActivity.class))); } protected void showLoggedInMenu(final User user) { 
loginButton.setVisibility(GONE); currentUserButton.setVisibility(VISIBLE); currentUserButton.setOnClickListener(v -> { final PopupMenu popup = new PopupMenu(v.getContext(), currentUserButton); popup.getMenuInflater().inflate(R.menu.current_user_menu, popup.getMenu()); popup.setOnMenuItemClickListener(item -> { switch (item.getItemId()) { case R.id.logout: final Context context = v.getContext(); logout.execute(); final Intent intent = new Intent(context, DiscoveryActivity.class) .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK); context.startActivity(intent); break; } return true; }); popup.show(); }); } protected void showLoggedOutMenu() { currentUserButton.setVisibility(GONE); loginButton.setVisibility(VISIBLE); loginButton.setOnClickListener(v -> { Intent intent = new Intent(getContext(), LoginToutActivity.class); getContext().startActivity(intent); }); } protected void initializeCategorySpinner() { final ArrayAdapter<CharSequence> adapter; if (!isInEditMode()) { adapter = ArrayAdapter.createFromResource(getContext(), R.array.spinner_categories_array, android.R.layout.simple_spinner_item); } else { final String sampleData[] = {"Staff Picks"}; adapter = new ArrayAdapter<>(getContext(), android.R.layout.simple_spinner_item, sampleData); } adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); categorySpinner.setAdapter(adapter); // onItemSelected will fire immediately with the default selection categorySpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(final AdapterView<?> spinner, final View view, final int position, final long itemId) { final String item = spinner.getItemAtPosition(position).toString(); } @Override public void onNothingSelected(final AdapterView<?> adapterView) { } }); } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); if (isInEditMode()) { return; } if (currentUser.getUser() == null) { showLoggedOutMenu(); } 
loginSubscription = currentUser.loggedInUser() .observeOn(AndroidSchedulers.mainThread()) .subscribe(this::showLoggedInMenu); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); if (isInEditMode()) { return; } loginSubscription.unsubscribe(); } }
app/src/main/java/com/kickstarter/ui/views/DiscoveryToolbar.java
package com.kickstarter.ui.views; import android.content.Context; import android.content.Intent; import android.support.v7.widget.PopupMenu; import android.support.v7.widget.Toolbar; import android.util.AttributeSet; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Spinner; import android.widget.TextView; import com.kickstarter.KSApplication; import com.kickstarter.R; import com.kickstarter.libs.CurrentUser; import com.kickstarter.libs.Logout; import com.kickstarter.models.User; import com.kickstarter.ui.activities.ActivityFeedActivity; import com.kickstarter.ui.activities.DiscoveryActivity; import com.kickstarter.ui.activities.LoginToutActivity; import javax.inject.Inject; import butterknife.Bind; import butterknife.ButterKnife; import rx.Subscription; import rx.android.schedulers.AndroidSchedulers; public class DiscoveryToolbar extends Toolbar { @Bind(R.id.activity_feed_button) TextView activityFeedButton; @Bind(R.id.category_spinner) Spinner categorySpinner; @Bind(R.id.current_user_button) TextView currentUserButton; @Bind(R.id.login_button) TextView loginButton; @Bind(R.id.toolbar) Toolbar toolbar; @Inject CurrentUser currentUser; @Inject Logout logout; Subscription loginSubscription; public DiscoveryToolbar(final Context context) { super(context); } public DiscoveryToolbar(final Context context, final AttributeSet attrs) { super(context, attrs); } public DiscoveryToolbar(final Context context, final AttributeSet attrs, final int defStyleAttr) { super(context, attrs, defStyleAttr); } @Override protected void onFinishInflate() { super.onFinishInflate(); if (isInEditMode()) { return; } ButterKnife.bind(this); ((KSApplication) getContext().getApplicationContext()).component().inject(this); initializeCategorySpinner(); activityFeedButton.setOnClickListener(v -> getContext() .startActivity(new Intent(getContext(), ActivityFeedActivity.class))); } protected void showLoggedInMenu(final User user) { 
loginButton.setVisibility(GONE); currentUserButton.setVisibility(VISIBLE); currentUserButton.setOnClickListener(v -> { final PopupMenu popup = new PopupMenu(v.getContext(), currentUserButton); popup.getMenuInflater().inflate(R.menu.current_user_menu, popup.getMenu()); popup.setOnMenuItemClickListener(item -> { switch (item.getItemId()) { case R.id.logout: final Context context = v.getContext(); logout.execute(); final Intent intent = new Intent(context, DiscoveryActivity.class) .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK); context.startActivity(intent); break; } return true; }); popup.show(); }); } protected void showLoggedOutMenu() { currentUserButton.setVisibility(GONE); loginButton.setVisibility(VISIBLE); loginButton.setOnClickListener(v -> { Intent intent = new Intent(getContext(), LoginToutActivity.class); getContext().startActivity(intent); }); } protected void initializeCategorySpinner() { final ArrayAdapter<CharSequence> adapter; if (!isInEditMode()) { adapter = ArrayAdapter.createFromResource(getContext(), R.array.spinner_categories_array, android.R.layout.simple_spinner_item); } else { final String sampleData[] = {"Staff Picks"}; adapter = new ArrayAdapter<>(getContext(), android.R.layout.simple_spinner_item, sampleData); } adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); categorySpinner.setAdapter(adapter); // onItemSelected will fire immediately with the default selection categorySpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(final AdapterView<?> spinner, final View view, final int position, final long itemId) { final String item = spinner.getItemAtPosition(position).toString(); } @Override public void onNothingSelected(final AdapterView<?> adapterView) { } }); } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); if (currentUser.getUser() == null) { showLoggedOutMenu(); } loginSubscription = 
currentUser.loggedInUser() .observeOn(AndroidSchedulers.mainThread()) .subscribe(this::showLoggedInMenu); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); loginSubscription.unsubscribe(); } }
Allow toolbar to render in designer
app/src/main/java/com/kickstarter/ui/views/DiscoveryToolbar.java
Allow toolbar to render in designer
<ide><path>pp/src/main/java/com/kickstarter/ui/views/DiscoveryToolbar.java <ide> protected void onAttachedToWindow() { <ide> super.onAttachedToWindow(); <ide> <add> if (isInEditMode()) { <add> return; <add> } <add> <ide> if (currentUser.getUser() == null) { <ide> showLoggedOutMenu(); <ide> } <ide> protected void onDetachedFromWindow() { <ide> super.onDetachedFromWindow(); <ide> <add> if (isInEditMode()) { <add> return; <add> } <add> <ide> loginSubscription.unsubscribe(); <ide> } <ide> }
JavaScript
bsd-3-clause
db8ab80bef2b0d0e2cba2fe8014fd1d6aea7f637
0
CartoDB/Windshaft-cartodb,CartoDB/Windshaft-cartodb,CartoDB/Windshaft-cartodb,CartoDB/Windshaft-cartodb
var testHelper = require('../../support/test_helper'); var assert = require('../../support/assert'); var step = require('step'); var FastlyPurge = require('fastly-purge'); var _ = require('underscore'); var NamedMapsCacheEntry = require(__dirname + '/../../../lib/cartodb/cache/model/named_maps_entry'); var CartodbWindshaft = require(__dirname + '/../../../lib/cartodb/server'); describe('templates surrogate keys', function() { var serverOptions = require('../../../lib/cartodb/server_options'); // Enable Varnish purge for tests var varnishHost = serverOptions.varnish_host; serverOptions.varnish_host = '127.0.0.1'; var varnishPurgeEnabled = serverOptions.varnish_purge_enabled; serverOptions.varnish_purge_enabled = true; var fastlyConfig = serverOptions.fastly; var FAKE_FASTLY_API_KEY = 'fastly-api-key'; var FAKE_FASTLY_SERVICE_ID = 'fake-service-id'; serverOptions.fastly = { enabled: true, // the fastly api key apiKey: FAKE_FASTLY_API_KEY, // the service that will get surrogate key invalidation serviceId: FAKE_FASTLY_SERVICE_ID }; var server = new CartodbWindshaft(serverOptions); var templateOwner = 'localhost'; var templateName = 'acceptance'; var expectedTemplateId = templateName; var template = { version: '0.0.1', name: templateName, auth: { method: 'open' }, layergroup: { version: '1.2.0', layers: [ { options: { sql: 'select 1 cartodb_id, null::geometry as the_geom_webmercator', cartocss: '#layer { marker-fill:blue; }', cartocss_version: '2.3.0' } } ] } }; var templateUpdated = _.extend({}, template, {layergroup: {layers: [{ type: 'plain', options: { color: 'red' } }]} }); var expectedBody = { template_id: expectedTemplateId }; var varnishHttpUrl = [ 'http://', serverOptions.varnish_host, ':', serverOptions.varnish_http_port ].join(''); var cacheEntryKey = new NamedMapsCacheEntry(templateOwner, templateName).key(); var invalidationMatchHeader = '\\b' + cacheEntryKey + '\\b'; var fastlyPurgePath = '/service/' + FAKE_FASTLY_SERVICE_ID + '/purge/' + 
encodeURIComponent(cacheEntryKey); var nock = require('nock'); nock.enableNetConnect(/(127.0.0.1:5555|cartocdn.com)/); after(function(done) { serverOptions.varnish_purge_enabled = false; serverOptions.varnish_host = varnishHost; serverOptions.varnish_purge_enabled = varnishPurgeEnabled; serverOptions.fastly = fastlyConfig; nock.restore(); done(); }); function createTemplate(callback) { var postTemplateRequest = { url: '/api/v1/map/named?api_key=1234', method: 'POST', headers: { host: templateOwner, 'Content-Type': 'application/json' }, data: JSON.stringify(template) }; step( function postTemplate() { var next = this; assert.response(server, postTemplateRequest, { status: 200 }, function(res) { next(null, res); } ); }, function rePostTemplate(err, res) { if (err) { throw err; } var parsedBody = JSON.parse(res.body); assert.deepEqual(parsedBody, expectedBody); return true; }, function finish(err) { callback(err); } ); } it("invalidates surrogate keys on template update", function(done) { var scope = nock(varnishHttpUrl) .intercept('/key', 'PURGE') .matchHeader('Invalidation-Match', invalidationMatchHeader) .reply(204, ''); var fastlyScope = nock(FastlyPurge.FASTLY_API_ENDPOINT) .post(fastlyPurgePath) .matchHeader('Fastly-Key', FAKE_FASTLY_API_KEY) .matchHeader('Fastly-Soft-Purge', 1) .matchHeader('Accept', 'application/json') .reply(200, { status:'ok' }); step( function createTemplateToUpdate() { createTemplate(this); }, function putValidTemplate(err) { if (err) { throw err; } var updateTemplateRequest = { url: '/api/v1/map/named/' + expectedTemplateId + '/?api_key=1234', method: 'PUT', headers: { host: templateOwner, 'Content-Type': 'application/json' }, data: JSON.stringify(templateUpdated) }; var next = this; assert.response(server, updateTemplateRequest, { status: 200 }, function(res) { setTimeout(function() { next(null, res); }, 50); } ); }, function checkValidUpdate(err, res) { if (err) { throw err; } var parsedBody = JSON.parse(res.body); 
assert.deepEqual(parsedBody, expectedBody); assert.equal(scope.pendingMocks().length, 0); assert.equal(fastlyScope.pendingMocks().length, 0); return null; }, function finish(err) { if ( err ) { return done(err); } testHelper.deleteRedisKeys({'map_tpl|localhost': 0}, done); } ); }); it("invalidates surrogate on template deletion", function(done) { var scope = nock(varnishHttpUrl) .intercept('/key', 'PURGE') .matchHeader('Invalidation-Match', invalidationMatchHeader) .reply(204, ''); var fastlyScope = nock(FastlyPurge.FASTLY_API_ENDPOINT) .post(fastlyPurgePath) .matchHeader('Fastly-Key', FAKE_FASTLY_API_KEY) .matchHeader('Fastly-Soft-Purge', 1) .matchHeader('Accept', 'application/json') .reply(200, { status:'ok' }); step( function createTemplateToDelete() { createTemplate(this); }, function deleteValidTemplate(err) { if (err) { throw err; } var deleteTemplateRequest = { url: '/api/v1/map/named/' + expectedTemplateId + '/?api_key=1234', method: 'DELETE', headers: { host: templateOwner, 'Content-Type': 'application/json' } }; var next = this; assert.response(server, deleteTemplateRequest, { status: 204 }, function(res) { setTimeout(function() { next(null, res); }, 50); } ); }, function checkValidUpdate(err) { if (err) { throw err; } assert.equal(scope.pendingMocks().length, 0); assert.equal(fastlyScope.pendingMocks().length, 0); return null; }, function finish(err) { done(err); } ); }); it("should update template even if surrogate key invalidation fails", function(done) { var scope = nock(varnishHttpUrl) .intercept('/key', 'PURGE') .matchHeader('Invalidation-Match', invalidationMatchHeader) .reply(503, ''); var fastlyScope = nock(FastlyPurge.FASTLY_API_ENDPOINT) .post(fastlyPurgePath) .matchHeader('Fastly-Key', FAKE_FASTLY_API_KEY) .matchHeader('Fastly-Soft-Purge', 1) .matchHeader('Accept', 'application/json') .reply(200, { status:'ok' }); step( function createTemplateToUpdate() { createTemplate(this); }, function putValidTemplate(err) { if (err) { throw err; } var 
updateTemplateRequest = { url: '/api/v1/map/named/' + expectedTemplateId + '/?api_key=1234', method: 'PUT', headers: { host: templateOwner, 'Content-Type': 'application/json' }, data: JSON.stringify(templateUpdated) }; var next = this; assert.response(server, updateTemplateRequest, { status: 200 }, function(res) { setTimeout(function() { next(null, res); }, 50); } ); }, function checkValidUpdate(err, res) { if (err) { throw err; } var parsedBody = JSON.parse(res.body); assert.deepEqual(parsedBody, expectedBody); assert.equal(scope.pendingMocks().length, 0); assert.equal(fastlyScope.pendingMocks().length, 0); return null; }, function finish(err) { if ( err ) { return done(err); } testHelper.deleteRedisKeys({'map_tpl|localhost': 0}, done); } ); }); });
test/acceptance/cache/surrogate_keys_invalidation.js
require('../../support/test_helper'); var assert = require('../../support/assert'); var redis = require('redis'); var step = require('step'); var FastlyPurge = require('fastly-purge'); var _ = require('underscore'); var NamedMapsCacheEntry = require(__dirname + '/../../../lib/cartodb/cache/model/named_maps_entry'); var CartodbWindshaft = require(__dirname + '/../../../lib/cartodb/server'); describe('templates surrogate keys', function() { var redisClient = redis.createClient(global.environment.redis.port); var serverOptions = require('../../../lib/cartodb/server_options'); // Enable Varnish purge for tests var varnishHost = serverOptions.varnish_host; serverOptions.varnish_host = '127.0.0.1'; var varnishPurgeEnabled = serverOptions.varnish_purge_enabled; serverOptions.varnish_purge_enabled = true; var fastlyConfig = serverOptions.fastly; var FAKE_FASTLY_API_KEY = 'fastly-api-key'; var FAKE_FASTLY_SERVICE_ID = 'fake-service-id'; serverOptions.fastly = { enabled: true, // the fastly api key apiKey: FAKE_FASTLY_API_KEY, // the service that will get surrogate key invalidation serviceId: FAKE_FASTLY_SERVICE_ID }; var server = new CartodbWindshaft(serverOptions); var templateOwner = 'localhost'; var templateName = 'acceptance'; var expectedTemplateId = templateName; var template = { version: '0.0.1', name: templateName, auth: { method: 'open' }, layergroup: { version: '1.2.0', layers: [ { options: { sql: 'select 1 cartodb_id, null::geometry as the_geom_webmercator', cartocss: '#layer { marker-fill:blue; }', cartocss_version: '2.3.0' } } ] } }; var templateUpdated = _.extend({}, template, {layergroup: {layers: [{ type: 'plain', options: { color: 'red' } }]} }); var expectedBody = { template_id: expectedTemplateId }; var varnishHttpUrl = [ 'http://', serverOptions.varnish_host, ':', serverOptions.varnish_http_port ].join(''); var cacheEntryKey = new NamedMapsCacheEntry(templateOwner, templateName).key(); var invalidationMatchHeader = '\\b' + cacheEntryKey + '\\b'; var 
fastlyPurgePath = '/service/' + FAKE_FASTLY_SERVICE_ID + '/purge/' + encodeURIComponent(cacheEntryKey); var nock = require('nock'); nock.enableNetConnect(/(127.0.0.1:5555|cartocdn.com)/); after(function(done) { serverOptions.varnish_purge_enabled = false; serverOptions.varnish_host = varnishHost; serverOptions.varnish_purge_enabled = varnishPurgeEnabled; serverOptions.fastly = fastlyConfig; nock.restore(); done(); }); function createTemplate(callback) { var postTemplateRequest = { url: '/api/v1/map/named?api_key=1234', method: 'POST', headers: { host: templateOwner, 'Content-Type': 'application/json' }, data: JSON.stringify(template) }; step( function postTemplate() { var next = this; assert.response(server, postTemplateRequest, { status: 200 }, function(res) { next(null, res); } ); }, function rePostTemplate(err, res) { if (err) { throw err; } var parsedBody = JSON.parse(res.body); assert.deepEqual(parsedBody, expectedBody); return true; }, function finish(err) { callback(err); } ); } it("invalidates surrogate keys on template update", function(done) { var scope = nock(varnishHttpUrl) .intercept('/key', 'PURGE') .matchHeader('Invalidation-Match', invalidationMatchHeader) .reply(204, ''); var fastlyScope = nock(FastlyPurge.FASTLY_API_ENDPOINT) .post(fastlyPurgePath) .matchHeader('Fastly-Key', FAKE_FASTLY_API_KEY) .matchHeader('Fastly-Soft-Purge', 1) .matchHeader('Accept', 'application/json') .reply(200, { status:'ok' }); step( function createTemplateToUpdate() { createTemplate(this); }, function putValidTemplate(err) { if (err) { throw err; } var updateTemplateRequest = { url: '/api/v1/map/named/' + expectedTemplateId + '/?api_key=1234', method: 'PUT', headers: { host: templateOwner, 'Content-Type': 'application/json' }, data: JSON.stringify(templateUpdated) }; var next = this; assert.response(server, updateTemplateRequest, { status: 200 }, function(res) { setTimeout(function() { next(null, res); }, 50); } ); }, function checkValidUpdate(err, res) { if (err) { 
throw err; } var parsedBody = JSON.parse(res.body); assert.deepEqual(parsedBody, expectedBody); assert.equal(scope.pendingMocks().length, 0); assert.equal(fastlyScope.pendingMocks().length, 0); return null; }, function finish(err) { if ( err ) { return done(err); } redisClient.keys("map_*|localhost", function(err, keys) { if ( err ) { return done(err); } redisClient.del(keys, function(err) { return done(err); }); }); } ); }); it("invalidates surrogate on template deletion", function(done) { var scope = nock(varnishHttpUrl) .intercept('/key', 'PURGE') .matchHeader('Invalidation-Match', invalidationMatchHeader) .reply(204, ''); var fastlyScope = nock(FastlyPurge.FASTLY_API_ENDPOINT) .post(fastlyPurgePath) .matchHeader('Fastly-Key', FAKE_FASTLY_API_KEY) .matchHeader('Fastly-Soft-Purge', 1) .matchHeader('Accept', 'application/json') .reply(200, { status:'ok' }); step( function createTemplateToDelete() { createTemplate(this); }, function deleteValidTemplate(err) { if (err) { throw err; } var deleteTemplateRequest = { url: '/api/v1/map/named/' + expectedTemplateId + '/?api_key=1234', method: 'DELETE', headers: { host: templateOwner, 'Content-Type': 'application/json' } }; var next = this; assert.response(server, deleteTemplateRequest, { status: 204 }, function(res) { setTimeout(function() { next(null, res); }, 50); } ); }, function checkValidUpdate(err) { if (err) { throw err; } assert.equal(scope.pendingMocks().length, 0); assert.equal(fastlyScope.pendingMocks().length, 0); return null; }, function finish(err) { done(err); } ); }); it("should update template even if surrogate key invalidation fails", function(done) { var scope = nock(varnishHttpUrl) .intercept('/key', 'PURGE') .matchHeader('Invalidation-Match', invalidationMatchHeader) .reply(503, ''); var fastlyScope = nock(FastlyPurge.FASTLY_API_ENDPOINT) .post(fastlyPurgePath) .matchHeader('Fastly-Key', FAKE_FASTLY_API_KEY) .matchHeader('Fastly-Soft-Purge', 1) .matchHeader('Accept', 'application/json') .reply(200, { 
status:'ok' }); step( function createTemplateToUpdate() { createTemplate(this); }, function putValidTemplate(err) { if (err) { throw err; } var updateTemplateRequest = { url: '/api/v1/map/named/' + expectedTemplateId + '/?api_key=1234', method: 'PUT', headers: { host: templateOwner, 'Content-Type': 'application/json' }, data: JSON.stringify(templateUpdated) }; var next = this; assert.response(server, updateTemplateRequest, { status: 200 }, function(res) { setTimeout(function() { next(null, res); }, 50); } ); }, function checkValidUpdate(err, res) { if (err) { throw err; } var parsedBody = JSON.parse(res.body); assert.deepEqual(parsedBody, expectedBody); assert.equal(scope.pendingMocks().length, 0); assert.equal(fastlyScope.pendingMocks().length, 0); return null; }, function finish(err) { if ( err ) { return done(err); } redisClient.keys("map_*|localhost", function(err, keys) { if ( err ) { return done(err); } redisClient.del(keys, function(err) { return done(err); }); }); } ); }); });
Remove redis client
test/acceptance/cache/surrogate_keys_invalidation.js
Remove redis client
<ide><path>est/acceptance/cache/surrogate_keys_invalidation.js <del>require('../../support/test_helper'); <add>var testHelper = require('../../support/test_helper'); <ide> <ide> var assert = require('../../support/assert'); <del>var redis = require('redis'); <ide> var step = require('step'); <ide> var FastlyPurge = require('fastly-purge'); <ide> var _ = require('underscore'); <ide> <ide> <ide> describe('templates surrogate keys', function() { <del> <del> var redisClient = redis.createClient(global.environment.redis.port); <ide> <ide> var serverOptions = require('../../../lib/cartodb/server_options'); <ide> <ide> if ( err ) { <ide> return done(err); <ide> } <del> redisClient.keys("map_*|localhost", function(err, keys) { <del> if ( err ) { <del> return done(err); <del> } <del> redisClient.del(keys, function(err) { <del> return done(err); <del> }); <del> }); <add> testHelper.deleteRedisKeys({'map_tpl|localhost': 0}, done); <ide> } <ide> ); <ide> }); <ide> if ( err ) { <ide> return done(err); <ide> } <del> redisClient.keys("map_*|localhost", function(err, keys) { <del> if ( err ) { <del> return done(err); <del> } <del> redisClient.del(keys, function(err) { <del> return done(err); <del> }); <del> }); <add> testHelper.deleteRedisKeys({'map_tpl|localhost': 0}, done); <ide> } <ide> ); <ide> });
JavaScript
mit
aa5e3bf0ea409c9fc13e8cc48d6ebe2c43b3272d
0
briehl/narrative,msneddon/narrative,msneddon/narrative,briehl/narrative,kbase/narrative,pranjan77/narrative,mlhenderson/narrative,nlharris/narrative,mlhenderson/narrative,pranjan77/narrative,aekazakov/narrative,jmchandonia/narrative,kbase/narrative,jmchandonia/narrative,rsutormin/narrative,mlhenderson/narrative,rsutormin/narrative,kbase/narrative,scanon/narrative,jmchandonia/narrative,msneddon/narrative,msneddon/narrative,scanon/narrative,rsutormin/narrative,psnovichkov/narrative,nlharris/narrative,msneddon/narrative,jmchandonia/narrative,nlharris/narrative,scanon/narrative,msneddon/narrative,nlharris/narrative,scanon/narrative,rsutormin/narrative,nlharris/narrative,scanon/narrative,mlhenderson/narrative,mlhenderson/narrative,jmchandonia/narrative,msneddon/narrative,briehl/narrative,nlharris/narrative,rsutormin/narrative,nlharris/narrative,rsutormin/narrative,psnovichkov/narrative,aekazakov/narrative,pranjan77/narrative,aekazakov/narrative,psnovichkov/narrative,aekazakov/narrative,aekazakov/narrative,psnovichkov/narrative,pranjan77/narrative,briehl/narrative,kbase/narrative,briehl/narrative,pranjan77/narrative,kbase/narrative,aekazakov/narrative,psnovichkov/narrative,jmchandonia/narrative,kbase/narrative,briehl/narrative,pranjan77/narrative,psnovichkov/narrative,psnovichkov/narrative,mlhenderson/narrative,briehl/narrative,pranjan77/narrative,jmchandonia/narrative,scanon/narrative
(function($, undefined) { var workspaceURL = "https://kbase.us/services/workspace"; var workspaceClient = new workspaceService(workspaceURL); notLoggedIn(); $(function() { /* $(document).on('loggedIn.kbase', function(event, token) { console.debug("logged in") loadPage(); }); */ $(document).on('loggedOut.kbase', function(event, token) { console.debug("logged out") notLoggedIn(); }); var loginWidget = $("#login-widget").kbaseLogin({ style: "narrative", rePrompt: false, login_callback: function(args) { alert("callback call"); loadPage(); }, logout_callback: function(args) { notLoggedIn(); }, prior_login_callback: function(args) { loadPage(); }, }); $("#signinbtn").click(function() { showLoading(); $("#login_error").hide(); loginWidget.login( $('#kbase_username').val(), $('#kbase_password').val(), function(args) { console.log(args); if (args.success === 1) { this.registerLogin(args); loadPage(); doneLoading(); $("#login-widget").show(); } else { $("#loading-indicator").hide(); $("#login_error").html(args.message); $("#login_error").show(); } } ); }); $('#kbase_password').keypress(function(e){ if(e.which == 13){//Enter key pressed $('#signinbtn').click(); } }); }); function notLoggedIn() { console.debug("Not logged in"); $("#header_banner").hide(); $("#alt_banner").show(); $("#login-widget").hide(); $("#login_section").show(); $("#public_section").show(); $("#newsfeed_column").hide(); $("#narrative_column").hide(); $("#login-widget").hide(); } function loadPage() { $("#alt_banner").hide(); // Hmmm??? $("#header_banner").show(); // Hmmm?? 
$("#login_section").hide(); $("#public_section").hide(); $("#newsfeed_column").show(); $("#narrative_column").show(); $("#login-widget").show(); var token = $("#login-widget").kbaseLogin("token"); var userId = $("#login-widget").kbaseLogin("get_kbase_cookie", "user_id"); var userName = $("#login-widget").kbaseLogin("get_kbase_cookie", "name"); if (!userName) { console.warn("No user name found"); userName = "KBase User"; } else { console.debug("user name = " + userName); } $("#kb_name").html(userName); loadProjectFeed(token, userId); loadRecentNarratives(); loadRecentProjects(); }; /** Get current user id */ function _user_id() { return $("#login-widget").kbaseLogin("get_kbase_cookie", "user_id"); } /** Get current token */ function _token() { return $("#login-widget").kbaseLogin("token"); } function clientError(error) { console.debug(error); }; // feed function loadProjectFeed (token, userId) { var workspaceURL = "https://www.kbase.us/services/workspace", wsClient = new workspaceService(workspaceURL); //get auth token if (token !== "null") { wsClient.list_workspaces({ auth: token }, function(results) { var data = { rows: []}; var count = 0; //first sort results = _.sortBy(results, function(ws) { return ws[2]; }); results.reverse(); //populate the data structure for the template _.every(results, function(workspace){ if (userId != workspace[1]) { var moddate = workspace[2]; moddate = moddate.replace(/T/g," "); var row = {username: workspace[1], workspace: workspace[0], date: moddate}; data.rows.push(row); count++; } return count !== 10; }); //call the template rows = ich.workspaces(data) $('#people_feed').append(rows); $('#loading-indicator-ws').hide(); }, function(err) { console.log(err); } ); } } /** Make a workspace array into a mapping */ function _ws_arr2obj(arr) { var obj = {}, fld = project.obj_meta_fields; for (i=0; i < fld.length; i++) { obj[fld[i]] = arr[i]; } return obj; } /** * Get all objects in home workspace that satisfy 'params'. 
* * @return mapping, keyed by object id. */ function getHomeWorkspaceObjects(params, callback) { var home = _user_id() + '_home'; $.extend(params, {auth: _token(), workspace: home}); //console.debug("list_workspace_objects, params=", params); project.ws_client.list_workspace_objects(params, function(obj_list) { var result = {}; //console.debug("Obj list=",obj_list); $.each(obj_list, function(idx, val) { //console.debug("Home workspace narrative at " + idx + ":", val); var obj_meta = _ws_arr2obj(val); result[obj_meta.id] = obj_meta; }); callback(result); }, function() { var args = [].slice.call(arguments); console.warn("Error getting home workspace objects. ", args); callback({}); } ); }; /** * Loads data and populates the recent narratives section of the user home page. * * Calls: * project.get_narratives(), to load narratives in a project * getHomeWorkspaceObjects(), to add narratives from the user's home ws * _showRecentNarratives(), when done loading, to show the results */ function loadRecentNarratives() { // show user that we are loading $("#no_narratives").hide(); $("#narratives_loading").show(); // get the data & then show it project.get_narratives({ callback: function(results) { console.debug("Project narratives:", results); // augment project narratives with user's home objects getHomeWorkspaceObjects({type: project.narrative_type}, function(home_narr) { $.extend(results, home_narr); console.debug("Project + home narratives:", results); // show combined results if (Object.keys(results).length > 0) { console.debug("Show recent narratives"); _showRecentNarratives(results); } // no data? show that, too else { $("#no_narratives").show(); } $("#narratives_loading").hide(); } ); } }); } /** * Update HTML to show >0 recent narratives. 
* * @pre results is non-empty * @return undefined */ function _showRecentNarratives(results) { var userId = _user_id(); // make a list that can be sorted //console.debug("results:", results); var indexed = []; _.each(Object.keys(results), function(key, i, lst) { indexed.push({'id': results[key].id, 'date': results[key].moddate}); }); //console.debug("indexed:",indexed); // sort the list by the date indexed = _.sortBy(indexed, 'date'); indexed.reverse(); // build data from first <= 5 items in the sorted list var rows = [], n = indexed.length; var limit = _.min([n, 5]); for (var i=0; i < limit; i++) { var narr = results[indexed[i].id]; var name = narr.id.replace(/_/g," "); //var project_id = narrative.workspace.replace(/_/g," "); rows.push({ "name": name, "narrative_id": narr.id, "project_id": narr.workspace, "userId": userId }); } //populate the html template var rows2 = ich.recent_narratives({'rows': rows}); $('#recent_narratives_list').append(rows2); } //populates the recent projects portion of the user home function loadRecentProjects() { $("#no_projects").hide(); $("#projects_loading").show(); project.get_projects({ callback: function(projectresults) { console.log("got here2"); if (Object.keys(projectresults).length > 0) { var data = { rows: []}; console.log("got here"); //first sort results = _.sortBy(projectresults, function(project_id) { return project_id.moddate; }); results.reverse(); //populate the data structure for the template var count = 0; _.every(projectresults, function(project_id){ var name = project_id.id.replace(/_/g," "); data.rows.push({ "name": name, "project_id": project_id.id }); count++; return count !== 5; }); //populate the html template var rows = ich.recent_projects(data) $('#recent_projects_list').append(rows); $("#projects_loading").hide(); var options = ich.project_select_options(data); $('#new_narrative_project').append(options); } else { $("#projects_loading").hide(); $("#no_projects").show(); } } }); }; function showLoading() { 
//$('#login_form button[type="submit"]').attr('disabled','disabled'); $("#loading-indicator").show(); } function doneLoading() { $("#loading-indicator").hide(); } //add click handler for creating new narrative $( "#new_narrative_submit" ).click(function() { var name = $("#new_narrative_name").val(); var project_id = $("#new_narrative_project").val(); //no spaces allowed in narrative name name = name.replace(/ /g,"_"); name = name.replace(/\W/g,""); if (project_id === "") { project_id = undefined; } //create the new narrative in ws project.new_narrative({ narrative_id: name, project_id: project_id, callback: function(results) { console.log("narrative created."); //redirect to the narrative page var userId = $("#login-widget").kbaseLogin("get_kbase_cookie", "user_id"); window.location.href = "http://narrative.kbase.us/narratives/"+userId+"/"+project_id+"."+name; } }); }); })( jQuery );
functional_site/static/js/users.js
(function($, undefined) { var workspaceURL = "https://kbase.us/services/workspace"; var workspaceClient = new workspaceService(workspaceURL); notLoggedIn(); $(function() { /* $(document).on('loggedIn.kbase', function(event, token) { console.debug("logged in") loadPage(); }); */ $(document).on('loggedOut.kbase', function(event, token) { console.debug("logged out") notLoggedIn(); }); var loginWidget = $("#login-widget").kbaseLogin({ style: "narrative", rePrompt: false, login_callback: function(args) { alert("callback call"); loadPage(); }, logout_callback: function(args) { notLoggedIn(); }, prior_login_callback: function(args) { loadPage(); }, }); $("#signinbtn").click(function() { showLoading(); $("#login_error").hide(); loginWidget.login( $('#kbase_username').val(), $('#kbase_password').val(), function(args) { console.log(args); if (args.success === 1) { this.registerLogin(args); loadPage(); doneLoading(); $("#login-widget").show(); } else { $("#loading-indicator").hide(); $("#login_error").html(args.message); $("#login_error").show(); } } ); }); $('#kbase_password').keypress(function(e){ if(e.which == 13){//Enter key pressed $('#signinbtn').click(); } }); }); function notLoggedIn() { console.debug("Not logged in"); $("#header_banner").hide(); $("#alt_banner").show(); $("#login-widget").hide(); $("#login_section").show(); $("#public_section").show(); $("#newsfeed_column").hide(); $("#narrative_column").hide(); $("#login-widget").hide(); } function loadPage() { $("#alt_banner").hide(); // Hmmm??? $("#header_banner").show(); // Hmmm?? 
$("#login_section").hide(); $("#public_section").hide(); $("#newsfeed_column").show(); $("#narrative_column").show(); $("#login-widget").show(); var token = $("#login-widget").kbaseLogin("token"); var userId = $("#login-widget").kbaseLogin("get_kbase_cookie", "user_id"); var userName = $("#login-widget").kbaseLogin("get_kbase_cookie", "name"); if (!userName) { console.warn("No user name found"); userName = "KBase User"; } else { console.debug("user name = " + userName); } $("#kb_name").html(userName); loadProjectFeed(token, userId); loadRecentNarratives(); loadRecentProjects(); }; /** Get current user id */ function _user_id() { return $("#login-widget").kbaseLogin("get_kbase_cookie", "user_id"); } /** Get current token */ function _token() { return $("#login-widget").kbaseLogin("token"); } function clientError(error) { console.debug(error); }; // feed function loadProjectFeed (token, userId) { var workspaceURL = "https://www.kbase.us/services/workspace", wsClient = new workspaceService(workspaceURL); //get auth token if (token !== "null") { wsClient.list_workspaces({ auth: token }, function(results) { var data = { rows: []}; var count = 0; //first sort results = _.sortBy(results, function(ws) { return ws[2]; }); results.reverse(); //populate the data structure for the template _.every(results, function(workspace){ if (userId != workspace[1]) { var moddate = workspace[2]; moddate = moddate.replace(/T/g," "); var row = {username: workspace[1], workspace: workspace[0], date: moddate}; data.rows.push(row); count++; } return count !== 10; }); //call the template rows = ich.workspaces(data) $('#people_feed').append(rows); $('#loading-indicator-ws').hide(); }, function(err) { console.log(err); } ); } } /** Make a workspace array into a mapping */ function _ws_arr2obj(arr) { var obj = {}, fld = project.obj_meta_fields; for (i=0; i < fld.length; i++) { obj[fld[i]] = arr[i]; } return obj; } /** * Get all objects in home workspace that satisfy 'params'. 
* * @return mapping, keyed by object id. */ function getHomeWorkspaceObjects(params, callback) { var home = _user_id() + '_home'; $.extend(params, {auth: _token(), workspace: home}); //console.debug("list_workspace_objects, params=", params); project.ws_client.list_workspace_objects(params, function(obj_list) { var result = {}; //console.debug("Obj list=",obj_list); $.each(obj_list, function(idx, val) { //console.debug("Home workspace narrative at " + idx + ":", val); var obj_meta = _ws_arr2obj(val); result[obj_meta.id] = obj_meta; }); callback(result); }, function() { var args = [].slice.call(arguments); console.warn("Error getting home workspace objects. ", args); callback({}); } ); }; /** * Loads data and populates the recent narratives section of the user home page. * * Calls: * project.get_narratives(), to load narratives in a project * getHomeWorkspaceObjects(), to add narratives from the user's home ws * _showRecentNarratives(), when done loading, to show the results */ function loadRecentNarratives() { // show user that we are loading $("#no_narratives").hide(); $("#narratives_loading").show(); // get the data & then show it project.get_narratives({ callback: function(results) { console.debug("Project narratives:", results); // augment project narratives with user's home objects getHomeWorkspaceObjects({type: project.narrative_type}, function(home_narr) { $.extend(results, home_narr); console.debug("Project + home narratives:", results); // show combined results if (Object.keys(results).length > 0) { console.debug("Show recent narratives"); _showRecentNarratives(results); } // no data? show that, too else { $("#no_narratives").show(); } $("#narratives_loading").hide(); } ); } }); } /** * Update HTML to show >0 recent narratives. 
* * @pre results is non-empty * @return undefined */ function _showRecentNarratives(results) { var userId = _user_id(); // make a list that can be sorted //console.debug("results:", results); var indexed = []; _.each(Object.keys(results), function(key, i, lst) { indexed.push({'id': results[key].id, 'date': results[key].moddate}); }); //console.debug("indexed:",indexed); // sort the list by the date indexed = _.sortBy(indexed, 'date'); indexed.reverse(); // build data from first <= 5 items in the sorted list var rows = [], n = indexed.length; var limit = min([n, 5]); for (var i=0; i < limit; i++) { var narr = results[indexed[i].id]; var name = narr.id.replace(/_/g," "); //var project_id = narrative.workspace.replace(/_/g," "); rows.push({ "name": name, "narrative_id": narr.id, "project_id": narr.workspace, "userId": userId }); } //populate the html template var rows2 = ich.recent_narratives({'rows': rows}); $('#recent_narratives_list').append(rows2); } //populates the recent projects portion of the user home function loadRecentProjects() { $("#no_projects").hide(); $("#projects_loading").show(); project.get_projects({ callback: function(projectresults) { console.log("got here2"); if (Object.keys(projectresults).length > 0) { var data = { rows: []}; console.log("got here"); //first sort results = _.sortBy(projectresults, function(project_id) { return project_id.moddate; }); results.reverse(); //populate the data structure for the template var count = 0; _.every(projectresults, function(project_id){ var name = project_id.id.replace(/_/g," "); data.rows.push({ "name": name, "project_id": project_id.id }); count++; return count !== 5; }); //populate the html template var rows = ich.recent_projects(data) $('#recent_projects_list').append(rows); $("#projects_loading").hide(); var options = ich.project_select_options(data); $('#new_narrative_project').append(options); } else { $("#projects_loading").hide(); $("#no_projects").show(); } } }); }; function showLoading() { 
//$('#login_form button[type="submit"]').attr('disabled','disabled'); $("#loading-indicator").show(); } function doneLoading() { $("#loading-indicator").hide(); } //add click handler for creating new narrative $( "#new_narrative_submit" ).click(function() { var name = $("#new_narrative_name").val(); var project_id = $("#new_narrative_project").val(); //no spaces allowed in narrative name name = name.replace(/ /g,"_"); name = name.replace(/\W/g,""); if (project_id === "") { project_id = undefined; } //create the new narrative in ws project.new_narrative({ narrative_id: name, project_id: project_id, callback: function(results) { console.log("narrative created."); //redirect to the narrative page var userId = $("#login-widget").kbaseLogin("get_kbase_cookie", "user_id"); window.location.href = "http://narrative.kbase.us/narratives/"+userId+"/"+project_id+"."+name; } }); }); })( jQuery );
typo fix
functional_site/static/js/users.js
typo fix
<ide><path>unctional_site/static/js/users.js <ide> <ide> // build data from first <= 5 items in the sorted list <ide> var rows = [], n = indexed.length; <del> var limit = min([n, 5]); <add> var limit = _.min([n, 5]); <ide> for (var i=0; i < limit; i++) { <ide> var narr = results[indexed[i].id]; <ide> var name = narr.id.replace(/_/g," ");
JavaScript
mit
3c52528c1fcc4f103447ded03fa8a43c1fc7509a
0
partridgejiang/Kekule.js,partridgejiang/Kekule.js,partridgejiang/Kekule.js,partridgejiang/Kekule.js,partridgejiang/Kekule.js,partridgejiang/Kekule.js
(function(){ "use strict"; var PS = Class.PropertyScope; var AU = Kekule.ArrayUtils; var KUnit = Kekule.Unit; /** * Base namespace for spectra. * @namespace */ Kekule.Spectroscopy = {}; /** * Enumeration of data mode of spectrum variable. * @enum */ Kekule.Spectroscopy.DataMode = { /** Value points are continuous, e.g. in IR data table. */ CONTINUOUS: 1, /** Value points are discrete, e.g. in MS peak table. */ PEAK: 2 }; /** * Some util methods about spectroscopy. * @class */ Kekule.Spectroscopy.Utils = { /** * Merge two data ranges. * Each item is a hash like {x: {min: minValue, max: maxValue}, y: {min: minValue, max: maxValue}}. * @param {Hash} r1 * @param {Hash} r2 * @returns {Hash} */ mergeDataRange: function(r1, r2) { var result = {}; var vars = AU.clone(Kekule.ObjUtils.getOwnedFieldNames(r1)); AU.pushUnique(vars, Kekule.ObjUtils.getOwnedFieldNames(r2)); for (var i = 0, l = vars.length; i < l; ++i) { var varSymbol =vars[i]; if (!r1[varSymbol]) result[varSymbol] = Object.extend(r2[varSymbol]); else if (!r2[varSymbol]) result[varSymbol] = Object.extend(r1[varSymbol]); else { result[varSymbol] = { 'min': (r1[varSymbol].min < r2[varSymbol].min)? r1[varSymbol].min: r2[varSymbol].min, 'max': (r1[varSymbol].max > r2[varSymbol].max)? r1[varSymbol].max: r2[varSymbol].max } } } return result; }, /** * Returns scale point information for a data range. * @param {Number} dataRangeMin * @param {Number} dataRangeMax * @param {Int} preferredScaleSectionCount * @returns {Hash} */ calcScalePointInfo: function(dataRangeMin, dataRangeMax, preferredScaleSectionCount) { if (preferredScaleSectionCount <= 0) preferredScaleSectionCount = 10; // avoid exception, set a default count value here var digitCounts = [Math.log10(Math.abs(dataRangeMin)), Math.log10(Math.abs(dataRangeMax))]; var digitCountMax = Math.floor(Math.max(digitCounts[0], digitCounts[1])); var digitCountMin = (Math.sign(dataRangeMin) === Math.sign(dataRangeMax))? 
Math.floor(Math.min(digitCounts[0], digitCounts[1], 0)): -Infinity; var useSciForm = (digitCountMax > 6); // need to use sci form if the digit num is very large to compact space var dataDelta = dataRangeMax - dataRangeMin; var deltaBetweenScales = dataDelta / preferredScaleSectionCount; var deltaBetweenScalesDigitCount = Math.max(Math.floor(Math.log10(Math.abs(deltaBetweenScales))), digitCountMin); var scaleBase = Math.pow(10, deltaBetweenScalesDigitCount); var actualDeltaBetweenScales; if (actualDeltaBetweenScales < 10 && dataDelta > 0.5) // major scale should be even number in 1-10 scope { actualDeltaBetweenScales = Math.ceil(actualDeltaBetweenScales / scaleBase / 2) * 2 * scaleBase; } else { actualDeltaBetweenScales = Math.ceil(deltaBetweenScales / scaleBase) * scaleBase; } var scaleFrom = Math.ceil(dataRangeMin / actualDeltaBetweenScales) * actualDeltaBetweenScales; var scaleTo = Math.floor(dataRangeMax / actualDeltaBetweenScales) * actualDeltaBetweenScales; var result = { 'useSciForm': useSciForm, 'scaleFrom': scaleFrom, 'scaleTo': scaleTo, 'scaleSectionCount': Math.round((scaleTo - scaleFrom) / actualDeltaBetweenScales), 'scaleValues': [], 'scaleBase': scaleBase, 'scaleFromOnBase': scaleFrom / scaleBase, 'scaleToOnBase': scaleTo / scaleBase, 'fixDigitsCountAfterPoint': Math.max(-deltaBetweenScalesDigitCount, 0) // record the recommended digits to appear after the decimal point }; for (var i = 0, l = result.scaleSectionCount + 1; i < l; ++i) { result.scaleValues.push(Math.round(i * actualDeltaBetweenScales / scaleBase) * scaleBase + scaleFrom); } //console.log(result, scaleBase); return result; } }; /** * A util object to manage the registered spectrum data value converters. * These converters are used to convert raw spectrum value from one unit to another (e.g., Hz to ppm in NMR). * @class */ Kekule.Spectroscopy.DataValueConverterManager = { /** @private */ _converters: [], /** * Register a converter object. 
* The converter object should implement the following methods: * { * convert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) => newValue, * canConvert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) => Bool, * getAltUnits: function(varDef, fromUnitObj, spectrumDataSection, spectrum) -> array (optional), returns the recommended alternative unitObjs for spectrum * } * @param {Object} converter */ register: function(converter) { DCM._converters.push(converter); }, /** * Unregister a converter. * @param {Object} converter */ unregister: function(converter) { var index = DMC._converters.indexOf(converter); if (index >= 0) DMC._converters.splice(index, 1); }, /** @private */ doConvert: function(value, varDef, fromUnit, toUnit, spectrumDataSection, spectrum) { if (fromUnit === toUnit) return value; if (!Kekule.NumUtils.isNormalNumber(value)) return value; var converters = DCM._converters; if (converters.length) { var fromUnitObj = Kekule.Unit.getUnit(fromUnit); var toUnitObj = Kekule.Unit.getUnit(toUnit); if (fromUnitObj && toUnitObj) { for (var i = converters.length - 1; i >= 0; --i) { var converter = converters[i]; if (converter.canConvert(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum)) return converter.convert(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum); } } } // no available converter found, can not convert Kekule.error(Kekule.$L('ErrorMsg.UNABLE_TO_CONVERT_BETWEEN_UNITS').format(fromUnitObj.getKey(), toUnitObj.getKey())); return null; }, /** @private */ getAltUnits: function(varDef, fromUnit, spectrumDataSection, spectrum) { var result = []; var converters = DCM._converters; if (converters.length) { var fromUnitObj = Kekule.Unit.getUnit(fromUnit); if (fromUnitObj) { for (var i = converters.length - 1; i >= 0; --i) { var converter = converters[i]; var subResult = converter.getAltUnits(varDef, fromUnitObj, spectrumDataSection, spectrum) || []; 
AU.pushUnique(result, subResult); } } } return result; } }; /** @ignore */ var DCM = Kekule.Spectroscopy.DataValueConverterManager; // register the default data value converter DCM.register({ convert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { return fromUnitObj.convertValueTo(value, toUnitObj); }, canConvert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { return fromUnitObj.canConvertValueTo(toUnitObj); }, getAltUnits: function(varDef, fromUnitObj, spectrumDataSection, spectrum) { var category = fromUnitObj.category; return category.getConvertableUnits(); } }); // register a converter to convert between NMR frequency and ppm DCM.register({ convert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { var observeFreq = spectrum.getParameter('observeFrequency'); if (fromUnitObj.category === KUnit.Frequency) // from Hz to ppm { var freq = fromUnitObj.convertValueTo(value, observeFreq.getUnit()); var pureRatio = freq / observeFreq.getValue(); // in ppm * 1e10, in another word, the pure ratio return KUnit.Dimensionless.ONE.convertValueTo(pureRatio, toUnitObj); } else if (fromUnitObj.category === K.Unit.Dimensionless) // from ppm to Hz { var value2 = fromUnitObj.convertValueToStandard(value); var freq = value2 * observeFreq.getValue(); var freqUnit = KUnit.getUnit(observeFreq.getUnit()); return freqUnit.convertValueTo(freq, toUnitObj); } }, canConvert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { if (spectrum.getSpectrumType() === Kekule.Spectroscopy.SpectrumType.NMR) { var observeFreq = spectrum.getParameter('observeFrequency'); if (observeFreq && Kekule.Unit.getUnit(observeFreq.getUnit()).category === Kekule.Unit.Frequency) { return (fromUnitObj.category === Kekule.Unit.Frequency && toUnitObj.category === Kekule.Unit.Dimensionless) || (fromUnitObj.category === Kekule.Unit.Dimensionless && toUnitObj.category === Kekule.Unit.Frequency); 
} } return false; }, getAltUnits: function(varDef, fromUnitObj, spectrumDataSection, spectrum) { var result = []; if (spectrum.getSpectrumType() === Kekule.Spectroscopy.SpectrumType.NMR) { var observeFreq = spectrum.getParameter('observeFrequency'); if (observeFreq && Kekule.Unit.getUnit(observeFreq.getUnit()).category === Kekule.Unit.Frequency) { if (fromUnitObj.category === Kekule.Unit.Frequency) result.push(Kekule.Unit.Dimensionless.PARTS_PER_MILLION); else if (fromUnitObj.category === Kekule.Unit.Dimensionless) result = result.concat(Kekule.Unit.Frequency.getConvertableUnits()); } } return result; } }); // register a converter to convert between IR wave length and wave number DCM.register({ convert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { if (fromUnitObj.category === KUnit.Length) // from wave length to wave number { var standardWaveLengthScalar = fromUnitObj.convertValueToStandardEx(value); var standardWaveNumber = 1 / standardWaveLengthScalar.value; return toUnitObj.convertValueFromStandard(standardWaveNumber); } else if (fromUnitObj.category === KUnit.WaveNumber) // from wave number to wave length { var standardWaveNumberScalar = fromUnitObj.convertValueToStandardEx(value); var standardWaveLength = 1 / standardWaveNumberScalar.value; return toUnitObj.convertValueFromStandard(standardWaveLength); } }, canConvert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { if (spectrum.getSpectrumType() === Kekule.Spectroscopy.SpectrumType.IR) { return (fromUnitObj.category === Kekule.Unit.Length && toUnitObj.category === Kekule.Unit.WaveNumber) || (fromUnitObj.category === Kekule.Unit.WaveNumber && toUnitObj.category === Kekule.Unit.Length); } return false; }, getAltUnits: function(varDef, fromUnitObj, spectrumDataSection, spectrum) { var result; if (spectrum.getSpectrumType() === Kekule.Spectroscopy.SpectrumType.IR) { if (fromUnitObj.category === Kekule.Unit.Length) result = 
[Kekule.Unit.WaveNumber.RECIPROCAL_CENTIMETER]; else if (fromUnitObj.category === Kekule.Unit.WaveNumber) result = [Kekule.Unit.Length.getConvertableUnits()]; } return result; } }); /** * Variable used in spectrum. * @class * @augments Kekule.VarDefinition * * @property {String} internalUnit Unit that used in internal data storage. * @property {String} externalUnit Unit that used to expose data to public. */ Kekule.Spectroscopy.SpectrumVarDefinition = Class.create(Kekule.VarDefinition, /** @lends Kekule.Spectroscopy.SpectrumVarDefinition# */ { /** @private */ CLASS_NAME: 'Kekule.Spectroscopy.SpectrumVarDefinition', initProperties: function() { this.defineProp('internalUnit', {'dataType': DataType.STRING, 'serializable': false, 'getter': function() { return this.getUnit(); }, 'setter': function(value) { this.setUnit(value); } }); this.defineProp('externalUnit', {'dataType': DataType.STRING}); }, /** * Returns the actual external unit of var. * Usually this function returns the value of {@link Kekule.Spectroscopy.SpectrumVarDefinition.externalUnit} * If it is not set, the result will be the same as internalUnit. * @returns {String} */ getActualExternalUnit: function() { return this.getExternalUnit() || this.getInternalUnit(); }, /** * Whether the external unit setting of this var differs from the internal unit. * @returns {Bool} */ hasDifferentExternalUnit: function() { var externalUnit = this.getExternalUnit(); return !!(externalUnit && externalUnit !== this.getInternalUnit()); } }); /** * Represent part of data in a spectrum. * @class * * @param {String} name * @param {Kekule.Spectroscopy.SpectrumData} parent Parent spectrum data object. * @param {Array} localVariables Array of variable definition objects or symbols. * * @property {Kekule.Spectroscopy.SpectrumData} parent Parent spectrum data object. * @property {Array} localVarInfos Stores the local variable information. Each item is a hash containing fields {'symbol', 'range'(optional)}. 
* @property {Array} varSymbols Array of variable symbols such as ['X', 'Y']. * @property {Int} mode Data mode of section, continuous or peak. * @property {Hash} peakRoot * @property {String} name * @property {String} title */ Kekule.Spectroscopy.SpectrumDataSection = Class.create(Kekule.ChemObject, /** @lends Kekule.Spectroscopy.SpectrumDataSection# */ { /** @private */ CLASS_NAME: 'Kekule.Spectroscopy.SpectrumDataSection', /** @private */ initialize: function(name, parent, localVariables) { this.setPropStoreFieldValue('name', name); this.setPropStoreFieldValue('localVarInfos', []); this.setPropStoreFieldValue('dataItems', []); this.setPropStoreFieldValue('parent', parent); this.tryApplySuper('initialize', []); //this.setLocalVarSymbols(localVariables); if (localVariables) this.setLocalVariables(localVariables); this.setDataSorted(true); this._cache = {}; // private //this.setPropStoreFieldValue('variables', variables? AU.clone(variables): []); }, doFinalize: function() { if (this.getParent() && this.getParent().removeChild) { // remove item in parent first this.getParent().removeChild(this); } this.clear(); var variables = this.getVariables(); for (var i = 0, l = variables.length; i < l; ++i) { variables[i].finalize(); } this.setPropStoreFieldValue('localVarInfos', null); this.tryApplySuper('doFinalize'); }, /** @private */ initProperties: function() { this.defineProp('parent', {'dataType': 'Kekule.MapEx', 'setter': null, 'serializable': false}); this.defineProp('name', {'dataType': DataType.STRING}); this.defineProp('title', {'dataType': DataType.STRING}); /* this.defineProp('variables', {'dataType': DataType.ARRAY}); this.defineProp('varSymbols', {'dataType': DataType.ARRAY, 'setter': null, 'scope': PS.PRIVATE, 'getter': function() { var result = []; var list = this.getVariables(); for (var j = 0, jj = list.length; j < jj; ++j) { var varDef = list[j]; result.push(varDef.getSymbol()); } return result; }}); */ this.defineProp('localVarInfos', {'dataType': 
DataType.ARRAY, 'setter': null}); this.defineProp('localVarSymbols', {'dataType': DataType.ARRAY, 'scope': PS.PRIVATE, 'serializable': false, 'getter': function() { var result = []; var list = this.getActualLocalVarInfos(); if (list && list.length) { for (var j = 0, jj = list.length; j < jj; ++j) { var info = list[j]; //result.push(info.varDef.getSymbol()); result.push(info.symbol); } } /* else // localVarInfos is not initialized yet, read from the storage result = this.getPropStoreFieldValue('localVarSymbols'); */ return result; }, 'setter': function(value) { var v = value || []; //this.setPropStoreFieldValue('localVarSymbols', v); this._updateLocalVarInfosFromSymbols(v); } }); this.defineProp('mode', {'dataType': DataType.INT, 'enumSource': Kekule.Spectroscopy.DataMode, 'setter': function(value) { if (this.getMode() !== value) { //console.log('set mode', value); this.setPropStoreFieldValue('mode', value); this.notifyDataChange(); } } }); this.defineProp('defPeakRoot', {'dataType': DataType.Hash}); // private, stores the data items, each item is a hash, e.g. 
{x: 1, y: 10, w: 2} this.defineProp('dataItems', {'dataType': DataType.ARRAY, 'setter': null, 'scope': PS.PRIVATE}); }, /** @ignore */ initPropValues: function() { this.tryApplySuper('initPropValues'); this.setMode(Kekule.Spectroscopy.DataMode.CONTINUOUS); }, /** @ignore */ ownerChanged: function(newOwner, oldOwner) { // change the owner of all extra info objects if possible for (var i = 0, l = this.getDataCount(); i < l; ++i) { var extra = this.getExtraInfoAt(i); if (extra && extra.setOwner) extra.setOwner(newOwner); } this.tryApplySuper('ownerChanged', [newOwner, oldOwner]); }, // custom save / load method /** @ignore */ doSaveProp: function(obj, prop, storageNode, serializer) { if (!prop.serializable) return; var propName = prop.name; if (propName === 'dataItems') { var node = serializer.createChildStorageNode(storageNode, serializer.propNameToStorageName('dataItems'), false); var subNode = serializer.createChildStorageNode(node, serializer.propNameToStorageName('values'), true); // create sub node for array serializer.save(obj.getDataItems(), subNode); // save array values in this sub node // extract all extra info of data array and save them var extraInfos = obj._extractAllExtraInfoOfDataItems(); if (extraInfos.length) { var subNode = serializer.createChildStorageNode(node, serializer.propNameToStorageName('extras'), true); serializer.save(extraInfos, subNode); } return true; // this property is handled, do not use default save method } else return false; // use the default method }, /** @ignore */ doLoadProp: function(obj, prop, storageNode, serializer) { if (!prop.serializable) return; var propName = prop.name; if (propName === 'dataItems') { var items = []; var node = serializer.getChildStorageNode(storageNode, serializer.propNameToStorageName('dataItems')); var subNode = serializer.getChildStorageNode(node, serializer.propNameToStorageName('values')); // get sub node for array serializer.load(items, subNode); obj.setPropStoreFieldValue('dataItems', items); 
// then the extra info var subNode = serializer.getChildStorageNode(node, serializer.propNameToStorageName('extras')); if (subNode) { var extras = []; serializer.load(extras, subNode); obj._writeExtraInfoOfDataItems(extras); } return true; } else return false; // use the default method }, /** @private */ _extractAllExtraInfoOfDataItems: function() { var result = []; for (var i = 0, l = this.getDataCount(); i < l; ++i) { var info = this.getExtraInfoAt(i); if (info) result.push({'index': i, 'info': info}); } return result; }, /** @private */ _writeExtraInfoOfDataItems: function(extras) { for (var i = 0, l = extras.length; i < l; ++i) { var info = extras[i]; this.setExtraInfoAt(info.index, info.info); } }, /* @ignore */ /* parentChanged: function(newParent, oldParent) { //console.log('parent changed', newParent && newParent.getClassName(), oldParent); var result = this.tryApplySuper('parentChanged', newParent, oldParent); // after changing of parent the local var info may be changed as well this._updateLocalVarInfosFromSymbols(this.getLocalVarSymbols()); return result; }, */ /** * Returns whether this data section containing the peak data. * @returns {Bool} */ isPeakSection: function() { return this.getMode() === Kekule.Spectroscopy.DataMode.PEAK; }, /** * Returns the actual parent SpectrumData object. * @returns {Kekule.Spectroscopy.Spectrum} * @private */ getParentSpectrum: function() { var p = this.getParent(); while (p && !(p instanceof Kekule.Spectroscopy.Spectrum) && p.getParent) { p = p.getParent(); } return p; }, /** * Returns the variable definition of parent spectrum data. * @returns {Array} */ getParentVariables: function() { var parent = this.getParentSpectrum(); return (parent && parent.getVariables()) || []; }, /** * Returns the actual local variable infos. * User should use this method rather than ref to localVarInfos property. 
* @returns {Array} */ getActualLocalVarInfos: function() { var result = AU.clone(this.getLocalVarInfos()); if (!result || !result.length) // inherit all from parent spectrum { var vars = this.getParentVariables(); for (var i = 0, l = vars.length; i < l; ++i) { result.push({'symbol': vars[i].symbol}); } } return result; }, /** @private */ _updateLocalVarInfosFromSymbols: function(varSymbols, silent) { var v = varSymbols || []; var infos = []; var parent = this.getParentSpectrum(); for (var i = 0, l = v.length; i < l; ++i) { var item = v[i]; this._pushLocalVariable(parent, item, infos); } //console.log('update local var infos', varSymbols, infos, parent); this.setPropStoreFieldValue('localVarInfos', infos); //this.setLocalVarInfos(infos); this.notifyPropSet('localVarInfos', infos, silent); }, /** @private */ _pushLocalVariable: function(parent, varSymbol, targetArray) { if (!targetArray) targetArray = this.getLocalVarInfos(); //var parent = this.getParent(); if (parent && parent.getVariable) { var varDef = parent.getVariable(varSymbol); if (varDef) { targetArray.push({/*'varDef': varDef,*/ 'symbol': varSymbol}); } } }, /** * Set the local variable symbols or definitions. * @param {Array} variables Array of var defintion or symbols. */ setLocalVariables: function(variables) { var localVar; var varDefs = [], varSymbols = []; for (var i = 0, l = variables.length; i < l; ++i) { localVar = variables[i]; if (typeof(localVar) === 'string') // a var symbol { varSymbols.push(localVar); } else // var definition { varDefs.push(localVar); } } if (varDefs.length) { this.setPropStoreFieldValue('localVarInfos', varDefs); this.notifyPropSet('localVarInfos', varDefs); } else if (varSymbols.length) { this._updateLocalVarInfosFromSymbols(varSymbols); } }, /** * Returns the local variable information index of variable. 
* @param {Variant} varIndexOrNameOrDef * @returns {Int} */ getLocalVarInfoIndex: function(varIndexOrNameOrDef) { var result = -1; var localVarInfos = this.getActualLocalVarInfos(); if (typeof (varIndexOrNameOrDef) === 'number') result = varIndexOrNameOrDef; else // if (varIndexOrNameOrDef instanceof Kekule.Spectroscopy.SpectrumVarDefinition) { var symbol = varIndexOrNameOrDef.getSymbol? varIndexOrNameOrDef.getSymbol(): varIndexOrNameOrDef; for (var i = 0, l = localVarInfos.length; i < l; ++i) { /* var varDef = localVarInfos[i].varDef; if (varDef === varIndexOrNameOrDef || varDef.getSymbol() === varIndexOrNameOrDef) { result = i; break; } */ if (symbol === localVarInfos[i].symbol) { result = i; break; } } } return result; }, /** * Returns the local information of variable. * @param {Variant} varIndexOrNameOrDef * @returns {Hash} */ getLocalVarInfo: function(varIndexOrNameOrDef) { var index = this.getLocalVarInfoIndex(varIndexOrNameOrDef); var result = (index >= 0)? this.getActualLocalVarInfos()[index]: null; /* if (result) { var parent = this.getParentSpectrum(); if (parent) { var symbol = result.symbol; result = Object.create(result); // avoid affect the original hash object result.varDef = parent.getVariable(symbol); } } */ return result; /* var result; var localVarInfos = this.getActualLocalVarInfos(); if (typeof (varIndexOrNameOrDef) === 'number') result = localVarInfos[varIndexOrNameOrDef]; else // if (varIndexOrNameOrDef instanceof Kekule.Spectroscopy.SpectrumVarDefinition) { for (var i = 0, l = localVarInfos.length; i < l; ++i) { var varDef = localVarInfos[i].varDef; if (varDef === varIndexOrNameOrDef || varDef.getSymbol() === varIndexOrNameOrDef) { result = localVarInfos[i]; break; } } } return result; */ }, /** * Returns the local information value of a variable. 
* @param {Variant} varIndexOrNameOrDef * @param {String} key * @returns {Variant} */ getLocalVarInfoValue: function(varIndexOrNameOrDef, key) { var info = this.getLocalVarInfo(varIndexOrNameOrDef); return info && info[key]; }, /** * Set a local information of variable. * @param {Variant} varIndexOrNameOrDef * @param {String} key * @param {Variant} value */ setLocalVarInfoValue: function(varIndexOrNameOrDef, key, value) { var info = this.getLocalVarInfo(varIndexOrNameOrDef); info[key] = value; }, /** * Returns the variable definition of a local variable. * @param {Variant} varIndexOrNameOrDef * @returns {Kekule.Spectroscopy.SpectrumVarDefinition} */ getLocalVarDef: function(varIndexOrNameOrDef) { //return this.getLocalVarInfoValue(varIndexOrNameOrDef, 'varDef'); var symbol = this.getLocalVarInfoValue(varIndexOrNameOrDef, 'symbol'); var parent = this.getParentSpectrum(); return parent && parent.getVariable(symbol); }, /** * Returns the local variable info of certain dependency. * @param {Int} dependency * @returns {Array} */ getLocalVarInfoOfDependency: function(dependency) { var result = []; var localVarInfos = this.getActualLocalVarInfos(); for (var i = 0, l = localVarInfos.length; i < l; ++i) { var varDef = this.getLocalVarDef(i); if (varDef.getDependency() === dependency) { var info = Object.extend({}, localVarInfos[i]); info.varDef = varDef; result.push(info); } } return result; }, /** * Returns the from/to value of a continuous variable. 
* @param {Variant} varNameOrIndexOrDef * @returns {Hash} Hash of {fromValue, toValue} */ getContinuousVarRange: function(varIndexOrNameOrDef) { var parent = this.getParent(); var varInfo = this.getLocalVarInfo(varIndexOrNameOrDef); return varInfo.continuousRange || (parent && parent.getContinuousVarRange && parent.getContinuousVarRange(varInfo.symbol)); /* var result = this.getLocalVarInfoValue(varIndexOrNameOrDef, 'continuousRange'); if (!result) { var parent = this.getParent(); result = parent && parent.getContinuousVarRange(varInfo.varDef); } return result; */ }, /** * Set the from/to value of a variable and mark it as a continuous one. * @param {Variant} varNameOrIndexOrDef * @param {Number} fromValue * @param {Number} toValue */ setContinuousVarRange: function(varIndexOrNameOrDef, fromValue, toValue) { /* var varInfo = this.getLocalVarInfo(varIndexOrNameOrDef); varInfo.range = {'fromValue': fromValue, 'toValue': toValue}; */ this.setLocalVarInfoValue(varIndexOrNameOrDef, 'continuousRange', {'fromValue': fromValue, 'toValue': toValue}); return this; }, /** * Remove the continuous information of a variable. * @param {Variant} varIndexOrNameOrDef */ clearContinuousVarRange: function(varIndexOrNameOrDef) { /* var varInfo = this.getLocalVarInfo(varIndexOrNameOrDef); varInfo.range = null; */ this.setLocalVarInfoValue(varIndexOrNameOrDef, 'continuousRange', null); return this; }, /** * Set the local default value of a variable when the concrete value in spectrum is absent. * @param {Variant} varIndexOrNameOrDef * @param {Number} value */ setDefaultVarValue: function (varIndexOrNameOrDef, value) { this.setLocalVarInfoValue(varIndexOrNameOrDef, 'defaultValue', value); return this; }, /** * Clear the local default value of a variable. 
* @param {Variant} varIndexOrNameOrDef */ clearDefaultVarValue: function(varIndexOrNameOrDef) { return this.setDefaultVarValue(varIndexOrNameOrDef, null); }, /** * Get the local default value of a variable when the concrete value in spectrum is absent. * @param {Variant} varIndexOrNameOrDef * @returns {Number} */ getDefaultVarValue: function(varIndexOfNameOrDef) { var result = this.getLocalVarInfoValue(varIndexOfNameOrDef, 'defaultValue'); if (Kekule.ObjUtils.isUnset(result)) { var varInfo = this.getLocalVarInfo(varIndexOfNameOrDef); var parent = this.getParent(); result = parent && parent.getDefaultVarValue(varInfo.symbol); } return result; }, /** * Returns the range when displaying spectrum of a variable. * @param {Variant} varNameOrIndexOrDef * @param {Hash} options May include fields: * { * autoCalc: Bool. If true, when explicit display range is not set, the number range of variable will be calculated and returned. * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} Hash of {min, max} */ getVarDisplayRange: function(varIndexOrNameOrDef, options) { var op = options || {}; //var varDef = this.getVar var varIndex = this.getLocalVarInfoIndex(varIndexOrNameOrDef); var info = this.getLocalVarInfo(varIndex); var result = info.displayRange? 
Object.extend({}, info.displayRange): null; // avoid affect the original values if (!result) // check the var definition { //var varDef = info.varDef; var varDef = this.getLocalVarDef(varIndex); var varDefRange = varDef.getInfoValue('displayRange'); if (varDefRange) result = Object.extend({}, varDefRange); // avoid affecting the original values } if (!result && op.autoCalc) result = this.calcDataRange(varIndex, {basedOnInternalUnit: true})[info.symbol]; // get range with internal unit first //result = this.calcDataRange(varIndexOrNameOrDef)[info.varDef.getSymbol()]; // do not forget to do unit conversion if necessary if (!op.basedOnInternalUnit) { result = this._convertDataRangeToExternalUnit(result, varIndex); /* var fieldNames = Kekule.ObjUtils.getOwnedFieldNames(result); for (var i = 0, l = fieldNames.length; i < l; ++i) { var fname = fieldNames[i]; //result[fname] = this._convertVarValueToExternal(result[fname], varIndex); } // after conversion, the min/max values may be reversed if (result && result.min > result.max) { var temp = result.min; result.min = result.max; result.max = temp; } */ } return result; }, /** * Set the range when displaying spectrum of a variable. * @param {Variant} varNameOrIndexOrDef * @param {Number} minValue * @param {Number} maxValue * @param {Hash} options Extra options, may include fields: * { * basedOnExternalUnit: Bool * } */ setVarDisplayRange: function(varIndexOrNameOrDef, minValue, maxValue, options) { var op = options || {}; var range = {'min': minValue, 'max': maxValue}; if (op.basedOnExternalUnit) // need to convert values to internal unit first { var varIndex = this.getLocalVarInfoIndex(varIndexOrNameOrDef); range = this._convertDataRangeToInternalUnit(range, varIndex); } this.setLocalVarInfoValue(varIndexOrNameOrDef, 'displayRange', range); return this; }, /** * Remove the display range information of a variable. 
* @param {Variant} varIndexOrNameOrDef */ clearVarDisplayRange: function(varIndexOrNameOrDef) { this.setLocalVarInfoValue(varIndexOrNameOrDef, 'displayRange',null); return this; }, /** * Returns display range of variables. * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be considered. * @param {Hash} options May include fields: * { * autoCalc: Bool. If true, when explicit display range is not set, the number range of variable will be calculated and returned. * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} */ getDisplayRangeOfVars: function(targetVariables, options) { var result = {}; if (!targetVariables) targetVariables = this.getLocalVarSymbols(); for (var i = 0, l = targetVariables.length; i < l; ++i) { var symbol = this._varToVarSymbol(targetVariables[i]); result[symbol] = this.getVarDisplayRange(targetVariables[i], options); } return result; }, /** @private */ _varToVarSymbol: function(targetVar) { /* var info = this.getLocalVarInfo(targetVar); if (info) return info.varDef.getSymbol(); */ var varDef = this.getLocalVarDef(targetVar); if (varDef) return varDef.getSymbol(); else return null; }, /** @private */ _varToVarSymbols: function(targetVariables) { var targetVarSymbols = []; var vars = targetVariables? 
AU.toArray(targetVariables): null; if (!vars) targetVarSymbols = this.getLocalVarSymbols(); else { for (var i = 0, l = vars.length; i < l; ++i) { targetVarSymbols.push(this._varToVarSymbol(vars[i])) } } return targetVarSymbols; }, /** @private */ _getDefaultPeakRoot: function() { var result = {}; var varInfos = this.getActualLocalVarInfos(); for (var i = 0, l = varInfos.length; i < l; ++i) { //var varDef = varInfos[i].varDef; var varDef = this.getLocalVarDef(i); if (varDef.getDependency() !== Kekule.VarDependency.INDEPENDENT) { result[varDef.getSymbol()] = 0; } } return result; }, /** * Iterate all data items and calculate the min/max value of each variable. * Note this function will always returns the value based on internal unit, * regardless of whether the external unit is set or not. * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @param {Hash} options Extra calculation options, may include fields: * { * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * ignorePeakRoot: Bool. If true, the peak root value will be ignored during calculation. 
* } * @returns {Hash} */ calcDataRange: function(targetVariables, options) { var op = options || {}; // since calculation of data range is a time-consuming job, here we cache the result var targetVarSymbols = this._varToVarSymbols(targetVariables); var notNum = function (v) { return !Kekule.NumUtils.isNormalNumber(v); }; var ranges = {}; var rangeCache = this._cache.ranges; if (!rangeCache) { rangeCache = {}; this._cache.ranges = rangeCache; } var remainingVarSymbols = []; for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = targetVarSymbols[i]; if (rangeCache[symbol]) // cached { // console.log('got range from cache', symbol); ranges[symbol] = Object.extend({}, rangeCache[symbol]); } else remainingVarSymbols.push(symbol); } if (remainingVarSymbols.length) { var self = this; var isPeakData = this.isPeakSection(); this.forEach(function (dataValue, index) { for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { var symbol = remainingVarSymbols[i]; if (notNum(dataValue[symbol])) continue; if (!ranges[symbol]) ranges[symbol] = {}; ranges[symbol].min = notNum(ranges[symbol].min) ? dataValue[symbol] : Math.min(ranges[symbol].min, dataValue[symbol]); ranges[symbol].max = notNum(ranges[symbol].max) ? dataValue[symbol] : Math.max(ranges[symbol].max, dataValue[symbol]); // consider peak root value if (isPeakData && !op.ignorePeakRoot) { var peakRootValue = self.getPeakRootValueOf(dataValue); if (peakRootValue && !notNum(peakRootValue[symbol])) { ranges[symbol].min = notNum(ranges[symbol].min) ? peakRootValue[symbol] : Math.min(ranges[symbol].min, peakRootValue[symbol]); ranges[symbol].max = notNum(ranges[symbol].max) ? 
peakRootValue[symbol] : Math.max(ranges[symbol].max, peakRootValue[symbol]); } } } }, null, {basedOnInternalUnit: true}); // here we use the internal unit, to keep the cache with the same unit // cache the range values for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { var symbol = remainingVarSymbols[i]; rangeCache[symbol] = Object.extend({}, ranges[symbol]); } } /* if (this.getMode() === Kekule.Spectroscopy.DataMode.PEAK) // consider the peak root { var peakRoot = this.getDefPeakRoot() || this._getDefaultPeakRoot(); for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = targetVarSymbols[i]; var rootValue = peakRoot[symbol]; if (!notNum(rootValue)) { ranges[symbol].min = Math.min(ranges[symbol].min, rootValue); ranges[symbol].max = Math.max(ranges[symbol].max, rootValue); } } } */ //console.log(this.getMode(), peakRoot, ranges); if (!op.basedOnInternalUnit) { for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = targetVarSymbols[i]; ranges[symbol] = this._convertDataRangeToExternalUnit(ranges[symbol], i); } } return ranges; }, /** @private */ _convertDataRangeToExternalUnit: function(range, varIndex) { if (!range) return range; var fieldNames = ['min', 'max']; for (var i = 0, l = fieldNames.length; i < l; ++i) { var fname = fieldNames[i]; range[fname] = this._convertVarValueToExternal(range[fname], varIndex); } // after conversion, the min/max values may be reversed if (range.min > range.max) { var temp = range.min; range.min = range.max; range.max = temp; } return range; }, /** @private */ _convertDataRangeToInternalUnit: function(range, varIndex) { if (!range) return range; var fieldNames = ['min', 'max']; for (var i = 0, l = fieldNames.length; i < l; ++i) { var fname = fieldNames[i]; range[fname] = this._convertVarValueToInternal(range[fname], varIndex); } // after conversion, the min/max values may be reversed if (range.min > range.max) { var temp = range.min; range.min = range.max; range.max = temp; } return 
range; }, /** * Iterate all data items and calculate the average value of each variable. * Note this function will always returns the value based on internal unit, * regardless of whether the external unit is set or not. * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @param {Hash} options Extra calculation options, may include fields: * { * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} */ calcDataAverage: function(targetVariables, options) { var op = options || {}; var targetVarSymbols = this._varToVarSymbols(targetVariables); var averages = {}; var averageCache = this._cache.averages; var notNum = function (v) { return !Kekule.NumUtils.isNormalNumber(v); }; if (!averageCache) { averageCache = {}; this._cache.averages = averageCache; } var remainingVarSymbols = []; for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = targetVarSymbols[i]; if (!notNum(averageCache[symbol])) // cached { averages[symbol] = averageCache[symbol]; } else remainingVarSymbols.push(symbol); } if (remainingVarSymbols.length) { var sums = {}; var counts = {}; for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { sums[remainingVarSymbols[i]] = 0; counts[remainingVarSymbols[i]] = 0; } this.forEach(function (dataValue, index) { for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { var symbol = remainingVarSymbols[i]; var value = dataValue[symbol]; if (notNum(value)) continue; sums[symbol] += value; ++counts[symbol]; } }, null, {basedOnInternalUnit: true}); // cache the average values for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { var symbol = remainingVarSymbols[i]; averages[symbol] = sums[symbol] / counts[symbol]; averageCache[symbol] = averages[symbol]; } } if (!op.basedOnInternalUnit) { for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = 
targetVarSymbols[i] averages[symbol] = this._convertVarValueToExternal(averages[symbol, i]); } } return averages; }, /** * Returns the symbols of continuous variable. * @returns {Array} */ getContinuousVarSymbols: function() { var result = []; var varInfos = this.getActualLocalVarInfos(); for (var i = 0, l = varInfos.length; i < l; ++i) { if (this.getContinuousVarRange(i)) result.push(varInfos[i].symbol); } return result; }, /** @private */ _itemHashToArray: function(hashValue) { if (!hashValue) return null; var result = []; var symbols = this.getLocalVarSymbols(); for (var i = 0, l = symbols.length; i < l; ++i) { result.push(hashValue[symbols[i]]); } // then the extra fields if (hashValue._extra) result._extra = hashValue._extra; else { // then the remaining fields of hashValue, storing in _extra field of array item var remainingFields = AU.exclude(Kekule.ObjUtils.getOwnedFieldNames(hashValue, false), symbols); if (remainingFields.length) result._extra = {}; for (var i = 0, l = remainingFields.length; i < l; ++i) { result._extra[remainingFields[i]] = hashValue[remainingFields[i]]; } } return result; }, /** @private */ _itemArrayToHash: function(arrayValue, options) { if (!arrayValue) return null; var result = {}; var symbols = this.getLocalVarSymbols(); for (var i = 0, l = Math.min(symbols.length, arrayValue.length); i < l; ++i) { var value; if (!options.basedOnInternalUnit) value = this._convertVarValueToExternal(arrayValue[i], i); else value = arrayValue[i]; result[symbols[i]] = value; }// if (arrayValue._extra) { //result = Object.extend(result, arrayValue._extra); result._extra = arrayValue._extra; } return result; }, /** @private */ _convertVarValueToNewUnit: function(value, varDef, fromUnit, toUnit) { if (!Kekule.NumUtils.isNormalNumber(value)) // not a number, usually can not be converted return value; //return Kekule.UnitUtils.convertValue(value, fromUnit, toUnit); return Kekule.Spectroscopy.DataValueConverterManager.doConvert(value, varDef, fromUnit, 
toUnit, this, this.getParent());
	},
	/**
	 * Convert a raw value (storaged value) to the one exposed to external with a different unit.
	 * @param {Number} value
	 * @param {Int} varIndex
	 * @returns {Number} value
	 * @private
	 */
	_convertVarValueToExternal: function(value, varIndex)
	{
		var result = value;
		var varDef = this.getLocalVarDef(varIndex);
		if (varDef && varDef.hasDifferentExternalUnit && varDef.hasDifferentExternalUnit())  // need to do a value conversion
		{
			result = this._convertVarValueToNewUnit(value, varDef, varDef.getInternalUnit(), varDef.getActualExternalUnit());
		}
		return result;
	},
	/**
	 * Convert a value with external unit to the one with internal unit.
	 * @param {Number} value
	 * @param {Int} varIndex
	 * @returns {Number} value
	 * @private
	 */
	_convertVarValueToInternal: function(value, varIndex)
	{
		var result = value;
		var varDef = this.getLocalVarDef(varIndex);
		if (varDef && varDef.hasDifferentExternalUnit && varDef.hasDifferentExternalUnit())  // need to do a value conversion
		{
			// note: conversion direction is reversed compared with _convertVarValueToExternal
			result = this._convertVarValueToNewUnit(value, varDef, varDef.getActualExternalUnit(), varDef.getInternalUnit());
		}
		return result;
	},
	/**
	 * Returns whether data in this section has been sorted.
	 * Sections with zero or one item are always regarded as sorted.
	 * @returns {Bool}
	 */
	isDataSorted: function()
	{
		return this._sorted || this.getDataCount() <= 1;
	},
	/**
	 * Manually set the sorted state of data.
	 * @param {Bool} value
	 */
	setDataSorted: function(value)
	{
		this._sorted = !!value;
		return this;
	},
	/**
	 * Sort all data items.
	 * Does nothing when the section is already marked as sorted.
	 * @param {Func} func Optional, func(hash1, hash2). If not set, data items will be sorted by default method.
	 */
	sort: function(func)
	{
		if (this.isDataSorted())
			return;
		var self = this;
		// the custom comparator receives hash values, but items are stored as arrays,
		// so wrap it to convert each item before comparing
		var sortFunc = func?
			function(a1, a2) { return func(self._itemArrayToHash(a1), self._itemArrayToHash(a2)); }:
			function(a1, a2) { return AU.compare(a1, a2); }
		this.getDataItems().sort(sortFunc);
		this.setDataSorted(true);
		return this;
	},
	/**
	 * Returns the count of data items.
* @returns {Int} */ getDataCount: function() { return this.getDataItems().length; }, /** @private */ clearCache: function() { this._cache = {}; }, /** * Notify the data of this section has been changed. * @private */ notifyDataChange: function() { var items = this.getDataItems(); this.setDataSorted(false); this.clearCache(); this.notifyPropSet('dataItems', items); this.invokeEvent('dataChange', {'data': items}) }, /** * Clear all data items. */ clear: function() { this.setDataItems([]); this.notifyDataChange(); this.setDataSorted(true); // empty data are always sorted }, /** * Add new data item. The item is can be a hash or an array. * If it is a hash, the hash fields must matches {@link Kekule.Spectroscopy.SpectrumData.independentVars} and {@link Kekule.Spectroscopy.SpectrumData.dependentVars}. * If it is an array, the values in array will automatically mapped to independent and dependent vars. * @param {Variant} item */ appendData: function(item) { var d; if (!DataType.isArrayValue(item)) // is hash value, convert it to array first d = this._itemHashToArray(item); else d = item; if (d) { var items = this.getDataItems(); items.push(d); this.notifyDataChange(); return d; } }, /** * Remove a data item. * @param {Array} item */ removeData: function(item) { var items = this.getDataItems(); var index = items.indexOf(item); return this.removeDataItemAt(index); }, /** * Remove a data item at index. * @param {Int} index */ removeDataAt: function(index) { var result = this.getDataItems().splice(index, 1); this.notifyDataChange(); return result; }, /** * Get the data value at index. * @param {Int} index * @returns {Array} The arrayed form of value. 
	 */
	getRawValueAt: function(index)
	{
		var rawValue = this.getDataItems()[index];
		if (rawValue)
		{
			var result = AU.clone(rawValue);
			var isContinousData = this.getMode() === Kekule.Spectroscopy.DataMode.CONTINUOUS;
			//if (this.getMode() === Kekule.Spectroscopy.DataMode.CONTINUOUS)
			{
				var dataIntervalCount = this.getDataCount() - 1;
				// check if there are omitted values
				for (var i = 0, l = result.length; i < l; ++i)
				{
					var v = result[i];
					if (DataType.isUndefinedValue(v) || DataType.isNullValue(v))  // maybe omitted? check if it is a continous variable or it has a default value
					{
						var defValue = this.getDefaultVarValue(i);
						if (Kekule.ObjUtils.notUnset(defValue))
							v = defValue;
						else if (isContinousData)
						{
							// interpolate the omitted value linearly from the continuous range
							var range = this.getContinuousVarRange(i);
							if (range)
							{
								// NOTE(review): with exactly two data items dataIntervalCount is 1, so this
								// condition makes both items fall back to range.fromValue — confirm whether
								// the test should be (dataIntervalCount >= 1)
								v = (dataIntervalCount > 1)? ((index / dataIntervalCount) * (range.toValue - range.fromValue) + range.fromValue): range.fromValue;
								//console.log('adjusted v', v, range);
							}
						}
					}
					result[i] = v;
				}
			}
			if (rawValue._extra)  // copy the extra properties
				result._extra = rawValue._extra;
			return result;
		}
		else
			return null;
	},
	/** @private */
	getHashValueAt: function(index, options)
	{
		return this._itemArrayToHash(this.getRawValueAt(index), options || {});
	},
	/**
	 * Get the data value at index.
	 * @param {Int} index
	 * @returns {Hash} The hashed form of value.
	 */
	getValueAt: function(index, options)
	{
		return this.getHashValueAt(index, options);
	},
	/**
	 * Set the data value at index.
	 * Detaches the extra info object of the replaced value and attaches the new one (if any).
	 * @param {Int} index
	 * @param {Array} The arrayed form of value.
	 */
	setRawValueAt: function(index, value)
	{
		var oldValue = this.getDataItems()[index];
		if (oldValue && oldValue._extra)
			this._extraInfoRemoved(oldValue._extra);
		this.getDataItems()[index] = value;
		if (value._extra)
		{
			this._extraInfoAdded(value._extra);
		}
		return this;
	},
	/** @private */
	setHashValueAt: function(index, value, options)
	{
		var aValue = this._itemHashToArray(value);
		this.setRawValueAt(index, aValue);
		return this;
	},
	/**
	 * Set the data value at index.
	 * @param {Int} index
	 * @param {Variant} value Value in hash or array form.
	 */
	setValueAt: function(index, value, options)
	{
		var d;
		if (!DataType.isArrayValue(value))  // is hash value, convert it to array first
			d = this._itemHashToArray(value);
		else
			d = value;
		this.setRawValueAt(index, d);
		return this;
	},
	/**
	 * Get the extra information of a data value.
	 * @param {Variant} value Data value in hash or array form.
	 * @returns {Hash}
	 */
	getExtraInfoOf: function(value)
	{
		return value._extra;
	},
	/**
	 * Set the extra information of a data value.
	 * @param {Variant} value Data value in hash or array form.
	 * @param {Hash} info
	 */
	setExtraInfoOf: function(value, info)
	{
		if (value._extra)
			this._extraInfoRemoved(value._extra);
		value._extra = info;
		this._extraInfoAdded(info);
		return this;
	},
	/**
	 * Get the extra information of data value at index.
	 * @param {Int} index
	 * @returns {Hash}
	 */
	getExtraInfoAt: function(index)
	{
		var d = this.getDataItems()[index];
		return d && d._extra;
	},
	/**
	 * Set the extra information of data value at index.
	 * NOTE(review): unlike getExtraInfoAt, this assumes the data item at index exists —
	 * d._extra will throw for an out-of-range index; confirm callers guarantee this.
	 * @param {Int} index
	 * @param {Hash} info
	 */
	setExtraInfoAt: function(index, info)
	{
		var d = this.getDataItems()[index];
		if (d._extra)
			this._extraInfoRemoved(d._extra);
		d._extra = info;
		this._extraInfoAdded(info);
		return this;
	},
	/**
	 * Called when an extra info object is attached to a data item; ChemObject extras
	 * are adopted into this section's parent/owner hierarchy.
	 * @private
	 */
	_extraInfoAdded: function(extraInfo)
	{
		if (extraInfo && extraInfo instanceof Kekule.ChemObject)
		{
			extraInfo.setParent(this);
			extraInfo.setOwner(this.getOwner());
		}
	},
	/**
	 * Called when an extra info object is detached from a data item; reverses _extraInfoAdded
	 * only when this section is still the parent.
	 * @private
	 */
	_extraInfoRemoved: function(extraInfo)
	{
		if (extraInfo && extraInfo instanceof Kekule.ChemObject && extraInfo.getParent() === this)
		{
			extraInfo.setParent(null);
			extraInfo.setOwner(null);
		}
	},
	/**
	 * Returns the peak root value of data item value.
* @param {Hash} value * @returns {Hash} */ getPeakRootValueOf: function(value) { if (this.getMode() !== Kekule.Spectroscopy.DataMode.PEAK) return null; else { var pr = this.getDefPeakRoot() || this._getDefaultPeakRoot(); return Object.extend(Object.extend({}, value), pr); } }, /** * Returns the peak root value of data item at index. * @param {Int} index * @returns {Hash} */ getPeakRootValueAt: function(index) { return this.getPeakRootValueOf(this.getValueAt(index)); }, /** * Calculate values of dependant variable values from independent variable values. * @param {Hash} independentValues * @param {Hash} extraOptions * @returns {Hash} */ getDependentValues: function(independentValues, extraOptions) { return this.doGetDependentValues(independantValues, extraOptions); }, /** * Do actual work of {@link Kekule.Spectroscopy.SpectrumData.getDependentValues}. * Descendants should override this method. * @param {Hash} independentValues * @param {Hash} extraOptions * @returns {Hash} * @private */ doGetDependentValues: function(independentValues, extraOptions) { return {}; }, /** * Returns an iterator to iterate all data in this object. * If iterator is not available, null should be returned. * Otherwise, the return value should be an object with method next(): {done, value}. * @returns {Object} */ getIterator: function() { return this.doGetIterator(); }, /** * Do actual work of {@link Kekule.Spectroscopy.SpectrumData.getIterator}. * Desendants may override this method. * @returns {Object} * @private */ doGetIterator: function() { var dataItems = this.getDataItems(); var self = this; var result = { index: 0, next: function(options) { if (this.index >= dataItems.length) return {'done': true}; else { var ret = {'done': false, 'value': /*self._itemArrayToHash(dataItems[this.index])*/self.getHashValueAt(this.index, options)}; ++this.index; return ret; } } }; return result; }, /** * Call function to each data item. * @param {Func} func With params: (hashValue [, index, options]). 
*/ forEach: function(func, thisArg, options) { var iterator = this.getIterator(); if (iterator) { //var dataItems = this.getDataItems(); var index = 0; var nextItem = iterator.next(options); while (!nextItem.done) { func.apply(thisArg, [nextItem.value, index]); ++index; nextItem = iterator.next(); } } return this; } }); /** * The base spectrum data class. * The concrete data can be stored in different forms, implemented in different descendant classes. * @class * @augments ObjectEx * * @param {String} id * @param {Array} variables Array of variables of data, each item is {@link Kekule.Spectroscopy.SpectrumVarDefinition}. * * @property {Array} variables Array of variables of data, each item is {@link Kekule.Spectroscopy.SpectrumVarDefinition}. * @property {Kekule.ChemObjList} sections Child data sections. * @property {Kekule.Spectroscopy.SpectrumData} activeSection Active data section to read/write data. * @property {Bool} autoCreateSection Whether create a initial data section automatically when inserting data. */ Kekule.Spectroscopy.SpectrumData = Class.create(ObjectEx, /** @lends Kekule.Spectroscopy.SpectrumData# */ { /** @private */ CLASS_NAME: 'Kekule.Spectroscopy.SpectrumData', /** @private */ initialize: function (id, variables, parent) { //this.setPropStoreFieldValue('dataItems', []); this.tryApplySuper('initialize', [id]); this.setPropStoreFieldValue('variables', variables ? 
AU.clone(variables) : []); var sections = new Kekule.ChemObjList(null, Kekule.Spectroscopy.SpectrumDataSection, true); this.setPropStoreFieldValue('sections', sections); this.setParent(parent); sections.setOwner(this.getOwner()); //this.createSection(this.getVariables()); // create a default section }, doFinalize: function () { //this.clear(); this.getSections().finalize(); var variables = this.getVariables() || []; for (var i = 0, l = variables.length; i < l; ++i) { variables[i].finalize(); } this.setPropStoreFieldValue('variables', null); this.tryApplySuper('doFinalize'); }, /** @private */ initProperties: function () { this.defineProp('owner', { 'dataType': 'Kekule.ChemSpace', 'serializable': false, 'scope': Class.PropertyScope.PUBLIC, 'getter': function() { return this.getPropStoreFieldValue('owner') || this._getDefaultOwner(); }, 'setter': function(value) { if (value !== this.getPropStoreFieldValue('owner')) { this.setPropStoreFieldValue('owner', value); var newOwner = this.getOwner(); var sections = this.getSections(); if (sections) sections.setOwner(newOwner); } } }); this.defineProp('parent', { 'dataType': 'Kekule.Spectroscopy.Spectrum', 'serializable': false, 'setter': function (value) { this.setPropStoreFieldValue('parent', value); var sections = this.getSections(); if (sections) sections.setParent(this.getParent() || this); } }); this.defineProp('sections', { 'dataType': 'Kekule.ChemObjList', 'setter': function (value) { var old = this.getSections(); if (old !== value) { if (old) { old.finalize(); } if (value) { value._transparent = true; // force the obj list be transparent value.setParent(this.getParent() || this); value.setOwner(this.getOwner()); } this.setPropStoreFieldValue('sections', value); } } }); this.defineProp('autoCreateSection', {'dataType': DataType.BOOL}); this.defineProp('activeSectionIndex', { 'dataType': DataType.INT, 'getter': function () { if (this.getSectionCount() <= 0) return -1; else if (this.getSectionCount() === 1) // only one 
section, it should be activated by default return 0; else return this.getPropStoreFieldValue('activeSectionIndex'); }, 'setter': function (value) { if (value >= 0 && value <= this.getSectionCount()) this.setPropStoreFieldValue('activeSectionIndex', value); } }); this.defineProp('activeSection', { 'dataType': 'Kekule.Spectroscopy.SpectrumDataSection', 'serializable': false, 'getter': function () { var result = this.getSectionAt(this.getActiveSectionIndex() || 0); if (!result && this.getSectionCount() <= 0 && this.getAutoCreateSection()) { result = this.createSection(this.getVariables()); //console.log('auto create'); } return result; }, 'setter': function (value) { this.setActiveSectionIndex(this.indexOfSection(value)); } }); /* this.defineProp('variables', {'dataType': DataType.ARRAY, 'setter': null, 'serializable': false, 'getter': function() { var result = []; for (var i = 0, l = this.getSectionCount(); i < l; ++i) { var vars = this.getSectionAt(i).getVariables(); AU.pushUnique(result, vars); } return result; } }); */ this.defineProp('variables', {'dataType': DataType.ARRAY/*, 'setter': null*/}); // private, stores the data items, each item is a hash, e.g. 
{x: 1, y: 10, w: 2} //this.defineProp('dataItems', {'dataType': DataType.ARRAY, 'setter': null, 'scope': PS.PRIVATE}); // private, cache all variable names this.defineProp('varSymbols', { 'dataType': DataType.ARRAY, 'setter': null, 'scope': PS.PRIVATE, 'getter': function () { var result = []; var list = this.getVariables() || []; for (var j = 0, jj = list.length; j < jj; ++j) { var varDef = list[j]; result.push(varDef.getSymbol()); } return result; } }); this.defineProp('mode', {'dataType': DataType.INT, 'enumSource': Kekule.Spectroscopy.DataMode}); }, /** @ignore */ initPropValues: function () { this.tryApplySuper('initPropValues'); this.setAutoCreateSection(true); this.setMode(Kekule.Spectroscopy.DataMode.CONTINUOUS); }, /** @private */ _getDefaultOwner: function() { var parent = this.getParent(); return parent && parent.getOwner && parent.getOwner(); // always returns the owner of parent spectrum }, /** @private */ getHigherLevelObj: function () { return this.getParent(); }, /** @ignore */ getChildHolder: function () { return this.getSections(); }, /** @ignore */ loaded: function(/*$super*/) { var sections = this.getSections(); if (sections) { sections.parentChanged(this); sections.ownerChanged(this.getOwner()); } this.tryApplySuper('loaded'); }, /** * Create and append a new {@link Kekule.Spectroscopy.SpectrumDataSection}. * @param {Array} variables Array of local variable symbol or definition used by secion. * @param {Int} mode * @returns {Kekule.Spectroscopy.SpectrumDataSection} */ createSection: function (variables, mode) { var result = new Kekule.Spectroscopy.SpectrumDataSection(null, this, variables); //result.setVariables(variables); result.setMode(mode || this.getMode()); this.getSections().appendChild(result); return result; }, /** * Remove all data sections. 
*/ clearSection: function () { var sections = this.getChildren(); for (var i = 0, l = sections.length; i < l; ++i) { sections[i].clear(); sections[i].setParent(null); sections[i].finalize(); } this.getSections().clear(); }, /** * Get count of child data sections. * @returns {Int} */ getSectionCount: function () { return this.getSections().getChildCount(); }, /** * Get child data sectionb at index. * @param {Int} index * @returns {Kekule.Spectroscopy.SpectrumDataSection} */ getSectionAt: function (index) { return this.getSections().getItemAt(index); }, /** * Get the index of child section in children list. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @returns {Int} Index of section or -1 when not found. */ indexOfSection: function (section) { return this.getSections().indexOfItem(section); }, /** * Check if section is in this spectrum data. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @returns {Bool} */ hasSection: function (section) { return this.indexOfSection(section) >= 0; }, /** * Remove a data section at index. * @param {Int} index * @returns {Kekule.Spectroscopy.SpectrumDataSection} Child section removed. */ removeSectionAt: function (index) { return this.getSections().removeItemAt(index); }, /** * Remove a child data section. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @returns {Kekule.Spectroscopy.SpectrumDataSection} Section object removed. */ removeSection: function (section) { return this.getSections().removeItem(section); }, /** * Insert a new section to index. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @param {Int} index * @return {Int} Index of section after insertion. */ insertSectionAt: function (section, index) { return this.getSections().insertItemAt(section, index); }, /** * Insert a data section before refSection in data section list. 
* @param {Kekule.Spectroscopy.SpectrumDataSection} obj * @param {Kekule.Spectroscopy.SpectrumDataSection} refChildr * @return {Int} Index of section after insertion. */ insertSectionBefore: function (section, refSection) { return this.getSections().insertItemBefore(section, refSection); }, /** * Add new data section to the tail of section list. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @return {Int} Index of obj after appending. */ appendSection: function (section) { return this.getSections().appendChild(section); }, /** * Returns whether multiple sections exists in this spectrum data. * @returns {Bool} */ hasMultipleSections: function () { return this.getSections().getChildCount() > 1; }, /** * Iterate all data items in a section and calculate the min/max value of each variable. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @returns {Hash} */ calcDataRangeOfSection: function (section, targetVariables) { return section.calcDataRange(targetVariables); }, /** * Iterate all data items in a set of sections and calculate the min/max value of each variable. * @param {Array} sections * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @returns {Hash} */ calcDataRangeOfSections: function (sections, targetVariables) { var result = {}; for (var i = 0, l = sections.length; i < l; ++i) { var range = sections[i].calcDataRange(targetVariables); result = Kekule.Spectroscopy.Utils.mergeDataRange(result, range); } return result; }, /** * Returns the display range of a section. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @param {Hash} options May include fields: * { * autoCalc: Bool. 
If true, when explicit display range is not set, the number range of variable will be calculated and returned. * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} */ getDisplayRangeOfSection: function (section, targetVariables, options) { return section.getDisplayRangeOfVars(targetVariables, options); }, /** * Returns the display range of a set of sections. * @param {Array} sections * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @param {Hash} options May include fields: * { * autoCalc: Bool. If true, when explicit display range is not set, the number range of variable will be calculated and returned. * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} */ getDisplayRangeOfSections: function (sections, targetVariables, options) { var result = {}; for (var i = 0, l = sections.length; i < l; ++i) { var range = sections[i].getDisplayRangeOfVars(targetVariables, options); result = Kekule.Spectroscopy.Utils.mergeDataRange(result, range); } return result; }, /** * Returns count of all variables. * @returns {Int} */ getVariableCount: function () { return (this.getVariables() || []).length; }, /** * Returns the variable definition by a index or variable name. * @param {Variant} varIndexOrNameOrDef * @returns {Kekule.Spectroscopy.SpectrumVarDefinition} */ getVariable: function (varIndexOrNameOrDef) { var varDef = (varIndexOrNameOrDef instanceof Kekule.VarDefinition) ? varIndexOrNameOrDef : (typeof (varIndexOrNameOrDef) === 'number') ? this.getVariables()[varIndexOrNameOrDef] : // index this.getVariables()[this.getVarSymbols().indexOf(varIndexOrNameOrDef)]; // name return varDef; }, /** * Returns the index of a variable definition. 
* @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef * @returns {Int} */ indexOfVariable: function (varDef) { return this.getVariables().indexOf(varDef); }, /** * Insert a new variable definition at a specified position. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef * @param {Int} index */ insertVariableAt: function (varDef, index) { if (index >= 0) this.getVariables().splice(index, 0, varDef); else this.getVariables().push(varDef); return this; }, /** * Insert a new variable definition before ref. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef * @param {Kekule.Spectroscopy.SpectrumVarDefinition} ref */ insertVariableBefore: function (varDef, ref) { var index = ref ? this.indexOfVarDefinition(ref) : -1; return this.insertVarDefinitionAt(varDef, index); }, /** * Append a new variable definition. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef */ appendVariable: function (varDef) { return this.insertVariableAt(varDef, -1); }, /** * Remove a variable definition at index. * @param {Int} index */ removeVariableAt: function (index) { this.getVariables().splice(index, 1); return this; }, /** * Remove a variable definition. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef */ removeVariable: function (varDef) { var index = this.indexOfVariable(varDef); if (index >= 0) this.removeVariableAt(index); return this; }, /** * Returns variables of certain dependency. * @param {Int} dependency Value from {@link Kekule.VarDependency} * @returns {Array} Array of var definition. */ getVariablesOfDependency: function (dependency) { var result = []; for (var i = 0, l = this.getVariableCount(); i < l; ++i) { var varDef = this.getVariable(i); if (varDef && varDef.getDependency() === dependency) result.push(varDef); } return result; }, /** * Returns the first/last value of a continuous variable. 
* @param {Variant} varNameOrIndexOrDef * @returns {Hash} Hash of {firstValue, lastValue} */ getContinuousVarRange: function (varIndexOrNameOrDef) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef && varDef.getInfo(); if (info) { if (info.continuous) { //var count = this.getDateItemCount(); return { 'fromValue': info.fromValue, 'toValue': info.toValue /*, 'interval': (info.lastValue - info.firstValue) / count */ }; } } return null; }, /** * Set the first/last value of a variable and mark it as a continuous one. * @param {Variant} varNameOrIndexOrDef * @param {Number} fromValue * @param {Number} toValue */ setContinuousVarRange: function (varIndexOrNameOrDef, fromValue, toValue) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef && varDef.getInfo(true); info.continuous = true; info.fromValue = fromValue; info.toValue = toValue; return this; }, /** * Remove the continuous information of a variable. * @param {Variant} varIndexOrNameOrDef */ clearContinuousVarRange: function (varIndexOrNameOrDef) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef.getInfo(); if (info && info.continuous) info.continuous = false; return this; }, /** * Set the default value of a variable when the concrete value in spectrum is absent. * E.g., in many NMR peak spectrums, y value will be omitted, and this method will provide a default one for it. * @param {Variant} varIndexOrNameOrDef * @param {Number} value */ setDefaultVarValue: function (varIndexOrNameOrDef, value) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef && varDef.getInfo(true); info.defaultValue = value; return this; }, /** * Clear the default value of a variable. * @param {Variant} varIndexOrNameOrDef */ clearDefaultVarValue: function(varIndexOrNameOrDef) { return this.setDefaultVarValue(varIndexOrNameOrDef, null); }, /** * Get the default value of a variable when the concrete value in spectrum is absent. 
* E.g., in many NMR peak spectrums, y value will be omitted, and this method will provide a default one for it. * @param {Variant} varIndexOrNameOrDef * @returns {Number} */ getDefaultVarValue: function(varIndexOrNameOrDef) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef && varDef.getInfo(); if (info) { return info.defaultValue; } }, /** * Iterate all child sections and execute function. * @param {Function} func Function with param (section, index). */ iterateSections: function(func) { for (var i = 0, l = this.getSectionCount(); i < l; ++i) { func(this.getSectionAt(i), i); } }, /** * Sort all data items. * @param {Func} func Optional, func(hash1, hash2). If not set, data items will be sorted by default method. */ sort: function(func) { this.iterateSections(function(c){ c.sort(func); }); }, /** * Returns the count of data items. * @returns {Int} */ getDataCount: function() { var result = 0; this.iterateSections(function(c){ result += c.getDataCount(); }); return result; }, /** * Clear all data items in all data sections. */ clearData: function() { this.iterateSections(function(c){ c.clear(); }); }, /** * Add new data item to active data section. The item is can be a hash or an array. * If it is a hash, the hash fields must matches {@link Kekule.Spectroscopy.SpectrumData.independentVars} and {@link Kekule.Spectroscopy.SpectrumData.dependentVars}. * If it is an array, the values in array will automatically mapped to independent and dependent vars. * @param {Variant} item */ appendData: function(item) { return this.getActiveSection().appendData(item); }, /** * Remove a data item from active data section. * @param {Array} item */ removeData: function(item) { return this.getActiveSection().removeData(item); }, /** * Remove a data item at index in current active section. * @param {Int} index */ removeDataAt: function(index) { return this.getActiveSection().removeDataAt(index); }, /** * Get the data value at index in current active section. 
* @param {Int} index * @returns {Array} The arrayed form of value. */ getRawValueAt: function(index) { return this.getActiveSection().getRawValueAt(index); }, /** @private */ getHashValueAt: function(index) { return this.getActiveSection().getHashValueAt(index); }, /** * Get the data value at index in current active section. * @param {Int} index * @returns {Hash} The hashed form of value. */ getValueAt: function(index) { return this.getHashValueAt(index); }, /** * Set the data value at index in current active section. * @param {Int} index * @param {Array} The array form of value. */ setRawValueAt: function(index, value) { this.getActiveSection().setRawValueAt(index, value); return this; }, /** @private */ setHashValueAt: function(index, value, options) { this.getActiveSection().setHashValueAt(index, value, options); return this; }, /** * Set the data value at index in current active section. * @param {Int} index * @param {Variant} value Value in hash or array form. */ setValueAt: function(index, value, options) { this.getActiveSection().setValueAt(index, value); return this; }, /** * Get the extra information of a data value in current active section. * @param {Variant} value Data value in hash or array form. * @returns {Hash} */ getExtraInfoOf: function(value) { return this.getActiveSection().getExtraInfoOf(value); }, /** * Set the extra information of a data value in current active section. * @param {Variant} value Data value in hash or array form. * @param {Hash} info */ setExtraInfoOf: function(value, info) { this.getActiveSection().setExtraInfoOf(value, info); return this; }, /** * Get the extra information of data value at index of current active section. * @param {Int} index * @returns {Hash} */ getExtraInfoAt: function(index) { return this.getActiveSection().getExtraInfoAt(index); }, /** * Set the extra information of data value at index of current active section. 
* @param {Int} index * @param {Hash} info */ setExtraInfoAt: function(index, info) { this.getActiveSection().setExtraInfoAt(index, info); return this; }, /** * Calculate values of dependant variable values from independent variable values. * @param {Hash} independentValues * @param {Hash} extraOptions * @returns {Hash} */ getDependentValues: function(independentValues, extraOptions) { return this.doGetDependentValues(independantValues, extraOptions); }, /** * Do actual work of {@link Kekule.Spectroscopy.SpectrumData.getDependentValues}. * Descendants should override this method. * @param {Hash} independentValues * @param {Hash} extraOptions * @returns {Hash} * @private */ doGetDependentValues: function(independentValues, extraOptions) { // TODO: unfinished return {}; }, /** * Returns an iterator to iterate all data in this object. * If iterator is not available, null should be returned. * Otherwise, the return value should be an object with method next(): {done, value}. * @returns {Object} */ getIterator: function() { return this.doGetIterator(); }, /** * Do actual work of {@link Kekule.Spectroscopy.SpectrumData.getIterator}. * Desendants may override this method. 
* @returns {Object} * @private */ doGetIterator: function() { //var dataItems = this.getDataItems(); var sections = this.getSections().getItems(); var self = this; var result = { sectionIndex: 0, index: 0, next: function() { var self = this; var outOfRange = function() { return (self.sectionIndex >= sections.length || (self.sectionIndex === sections.length - 1 && self.index >= sections[sections.length - 1].getDataCount())); } if (outOfRange()) return {'done': true}; else { if (this.index < sections[this.sectionIndex].getDataCount()) { var ret = {'done': false, 'value': sections[this.sectionIndex].getValueAt(this.index)}; ++this.index; } else { do { ++this.sectionIndex; this.index = 0; } while(this.index >= sections[this.sectionIndex].getDataCount() || self.sectionIndex >= sections.length); if (outOfRange()) return {'done': true}; else return {'done': false, 'value': sections[this.sectionIndex].getValueAt(this.index)}; } return ret; } } }; return result; }, /** * Call function to each data item. * @param {Func} func With params: (hashValue [, index]). */ forEach: function(func, thisArg) { var iterator = this.getIterator(); if (iterator) { var index = 0; var nextItem = iterator.next(); while (!nextItem.done) { func.apply(thisArg, [nextItem.value, index]); ++index; nextItem = iterator.next(); } } return this; } }); /** * Spectrum peak shape enumeration. * @enum */ Kekule.Spectroscopy.PeakShape = { SHARP: 'sharp', BROAD: 'broad' }; /** * Spectrum peak multiplicity enumeration. * @enum */ Kekule.Spectroscopy.PeakMultiplicity = { UNKNOWN: 0, SINGLET: 1, DOUBLET: 2, TRIPLET: 3, QUARTET: 4, QUINTET: 5, SEXTUPLET: 6, MULTIPLET: 255, }; /** * A special class to store the additional peak information (e.g. the assignment ref object of peak). * @class * @augments Kekule.ChemObject * * @param {Hash} params * * @property {Kekule.ChemObject} assignment The assignment target of peak, ususally an atom or a bond. 
* @property {String} shape Shape of peak, usually be set with value from {@link Kekule.Spectroscopy.PeakShape}. * @property {VARIANT} multiplicity Multiplicity of peak. * Usually be set with value from {@link Kekule.Spectroscopy.PeakMultiplicity}, but a custom string value (e.g. 'triplet121') is also allowed. */ Kekule.Spectroscopy.SpectrumPeakDetails = Class.create(Kekule.ChemObject, /** @lends Kekule.Spectroscopy.SpectrumPeakDetails# */ { /** @private */ CLASS_NAME: 'Kekule.Spectroscopy.SpectrumPeakDetails', /** @private */ initialize: function (params) { this.tryApplySuper('initialize', []); this.setPropValues(params); }, /** @private */ initProperties: function() { this.defineProp('assignment', {'dataType': 'Kekule.ChemObject', 'objRef': true, 'autoUpdate': true}); this.defineProp('shape', {'dataType': DataType.STRING}); this.defineProp('multiplicity', {'dataType': DataType.VARIANT}); } }); /** * Enumeration of spectrum types. * @enum */ Kekule.Spectroscopy.SpectrumType = { NMR: 'NMR', IR: 'IR', MS: 'MS', UV_VIS: 'UV_VIS', IMS: 'IMS', // ION MOBILITY SPECTRUM RAMAN: 'Raman', CHROMATOGRAPHY: 'chromatography', GENERAL: 'general' // unknown type }; /** * Some constants used by NMR spectrum. * @object */ Kekule.Spectroscopy.SpectrumNMR = { TargetNucleus: { C13: 'C13', H: 'H' } }; /** * Some constants used by MS spectrum. * @object */ Kekule.Spectroscopy.SpectrumMS = { SpectrometerType: { } }; /** * The base spectrum class. Concrete spectrum classes should be inherited from this one. * @class * @augments Kekule.ChemObject * * @property {String} spectrumType Type of spectrum, value from {@link Kekule.Spectroscopy.SpectrumType}. * @property {String} name Name of spectrum. * @property {String} title Title of spectrum. * @property {Hash} metaData Meta information of spectrum. * @property {Hash} conditions Conditions of spectrum. * @property {Hash} parameters Important parameters of spectrum. * @property {Hash} annotations Additional annotations of spectrum. 
* @property {Kekule.Spectroscopy.SpectrumData} data Spectrum data. * @property {Hash} spectrumParams Key spectrum parameters, e,g. the frequency of NMR. */ Kekule.Spectroscopy.Spectrum = Class.create(Kekule.ChemObject, /** @lends Kekule.Spectroscopy.Spectrum# */ { /** @private */ CLASS_NAME: 'Kekule.Spectroscopy.Spectrum', /** @private */ initialize: function(id) { this.setPropStoreFieldValue('data', new Kekule.Spectroscopy.SpectrumData(null, null, this)); this.tryApplySuper('initialize', [id]); this._initDelegatedMethods(); }, /** @ignore */ doFinalize: function() { var d = this.getData(); if (d) d.finalize(); this.tryApplySuper('doFinalize'); }, /** @private */ initProperties: function() { this.defineProp('spectrumType', {'dataType': DataType.STRING}); this.defineProp('name', {'dataType': DataType.STRING}); //this.defineProp('title', {'dataType': DataType.STRING}); this.defineProp('data', {'dataType': 'Kekule.Spectroscopy.SpectrumData', 'setter': function(value) { var old = this.getData(); if (value !== old) { if (old) { old.finalize(); } if (value) { value.setPropValue('parent', this, true); } this.setPropStoreFieldValue('data', value); } } }); /* this.defineProp('spectrumParams', { 'dataType': DataType.HASH, 'getter': function(canCreate) { var r = this.getPropStoreFieldValue('spectrumParams'); if ((!r) && canCreate) { r = {}; this.setPropStoreFieldValue('spectrumParams', r); } return r; }, 'setter': null }); */ this._defineInfoProperty('title'); this._defineInfoProperty('metaData', null, {'dataType': DataType.HASH}); this._defineInfoProperty('conditions', null, {'dataType': DataType.HASH}); this._defineInfoProperty('parameters', null, {'dataType': DataType.HASH}); this._defineInfoProperty('annotations', null, {'dataType': DataType.HASH}); this._defineDataDelegatedProperty('variables'); this._defineDataDelegatedProperty('dataSections', 'sections'); this._defineDataDelegatedProperty('activeDataSectionIndex', 'activeSectionIndex'); 
this._defineDataDelegatedProperty('activeDataSection', 'activeSection'); }, /** @private */ _initDelegatedMethods: function() { this._defineDataDelegatedMethod('createDataSection', 'createSection'); this._defineDataDelegatedMethod('clearDataSection', 'clearSection'); this._defineDataDelegatedMethod('getDataSectionCount', 'getSectionCount'); this._defineDataDelegatedMethod('getDataSectionAt', 'getSectionAt'); this._defineDataDelegatedMethod('indexOfDataSection', 'indexOfSection'); this._defineDataDelegatedMethod('hasDataSection', 'hasSection'); this._defineDataDelegatedMethod('removeDataSectionAt', 'removeSectionAt'); this._defineDataDelegatedMethod('removeDataSection', 'removeSection'); this._defineDataDelegatedMethod('insertDataSectionAt', 'insertSectionAt'); this._defineDataDelegatedMethod('insertDataSectionBefore', 'insertSectionBefore'); this._defineDataDelegatedMethod('appendDataSection', 'appendSection'); this._defineDataDelegatedMethod('iterateDataSection', 'iterateSection'); this._defineDataDelegatedMethod('sortData', 'sort'); this._defineDataDelegatedMethod('clearData'); this._defineDataDelegatedMethod('getVariable'); this._defineDataDelegatedMethod('indexOfVariable'); this._defineDataDelegatedMethod('insertVariableAt'); this._defineDataDelegatedMethod('insertVariableBefore'); this._defineDataDelegatedMethod('appendVariable'); this._defineDataDelegatedMethod('removeVariableAt'); this._defineDataDelegatedMethod('removeVariable'); this._defineDataDelegatedMethod('getVariablesOfDependency'); this._defineDataDelegatedMethod('getContinuousVarRange'); this._defineDataDelegatedMethod('setContinuousVarRange'); this._defineDataDelegatedMethod('clearContinuousVarRange'); this._defineDataDelegatedMethod('getDefaultVarValue'); this._defineDataDelegatedMethod('setDefaultVarValue'); this._defineDataDelegatedMethod('clearDefaultVarValue'); }, /** * Defines property which storing value in {@link Kekule.ChemObject.info}. 
* @param {String} propName * @param {String} infoFieldName * @param {Hash} options * @private */ _defineInfoProperty: function(propName, infoFieldName, options) { var defs; (function() { defs = Object.extend({ 'getter': function () { return this.getInfoValue(infoFieldName || propName); }, 'setter': function(value) { this.setInfoValue(infoFieldName || propName, value); }, 'serializable': false }, options); })(); return this.defineProp(propName, defs); }, /** * Defines property which reflecting the property values in {@link Kekule.Spectroscopy.Spectrum.data}. * @param {String} propName * @param {String} dataPropName * @private */ _defineDataDelegatedProperty: function(propName, dataPropName) { if (!dataPropName) dataPropName = propName; var dataPropInfo = ClassEx.getPropInfo(Kekule.Spectroscopy.SpectrumData, dataPropName); var propOptions = Object.create(dataPropInfo); propOptions.getter = null; propOptions.setter = null; propOptions.serializable = false; if (dataPropInfo.getter) { propOptions.getter = function() { return this.getData().getPropValue(dataPropName); }; } if (dataPropInfo.setter) { propOptions.setter = function(value) { this.getData().setPropValue(dataPropName, value); } } return this.defineProp(propName, propOptions); }, /** * Defines method which directly calling the corresponding one in {@link Kekule.Spectroscopy.Spectrum.data}. 
* @param {String} methodName * @param {String} dataMethodName * @private */ _defineDataDelegatedMethod: function(methodName, dataMethodName) { if (!dataMethodName) dataMethodName = methodName; var proto = ClassEx.getPrototype(this.getClass()); proto[methodName] = function() { //console.log('call', methodName, arguments); return this.getData()[dataMethodName].apply(this.getData(), arguments); } }, /** @private */ ownerChanged: function(/*$super, */newOwner, oldOwner) { // change the owner of child data and sections var data = this.getData(); if (data) data.setOwner(newOwner); this.tryApplySuper('ownerChanged', [newOwner, oldOwner]); }, /** @private */ _getInfoBasedHashPropValue: function(infoKeyName, propName) { var hash = this.getInfoValue(infoKeyName); return hash && hash[propName]; }, /** @private */ _setInfoBasedHashpropValue: function(infoKeyName, propName, value) { var hash = this.getInfoValue(infoKeyName); if (!hash) { hash = {}; this.setInfoValue(infoKeyName, hash); } hash[propName] = value; }, /** @private */ _getAllKeysOfInfoBasedHashProp: function(infoKeyName) { var hash = this.getInfoValue(infoKeyName); return hash? Kekule.ObjUtils.getOwnedFieldNames(hash, false): []; }, /** * Returns value of spectrum meta/condition/parameter/annotation. * @param {String} key * @param {Array} candicateCategories * @returns {Variant} */ getSpectrumInfoValue: function(key, candicateCategories) { if (!candicateCategories) candicateCategories = ['metaData', 'conditions', 'parameters', 'annotations']; for (var i = 0, l = candicateCategories.length; i < l; ++i) { var c = candicateCategories[i]; var v = this._getInfoBasedHashPropValue(c, key); if (Kekule.ObjUtils.notUnset(v)) return v; } return undefined; }, /** * Returns the value of a spectrum meta data. * @param {String} key * @returns {Variant} */ getMeta: function(key) { return this._getInfoBasedHashPropValue('metaData', key); }, /** * Set the value of a spectrum meta data. 
* @param {String} key * @param {Variant} value */ setMeta: function(key, value) { this._setInfoBasedHashpropValue('metaData', key, value); return this; }, /** * Returns the value of a spectrum condition. * @param {String} key * @returns {Variant} */ getCondition: function(key) { return this._getInfoBasedHashPropValue('conditions', key); }, /** * Set the value of a spectrum condition. * @param {String} key * @param {Variant} value */ setCondition: function(key, value) { this._setInfoBasedHashpropValue('conditions', key, value); return this; }, /** * Returns all the keys of spectrum condition list. * @returns {Array} */ getConditionKeys: function() { return this._getAllKeysOfInfoBasedHashProp('conditions'); }, /** * Returns the value of a spectrum parameter. * @param {String} key * @returns {Variant} */ getParameter: function(key) { return this._getInfoBasedHashPropValue('parameters', key); }, /** * Set the value of a spectrum parameter. * @param {String} key * @param {Variant} value */ setParameter: function(key, value) { this._setInfoBasedHashpropValue('parameters', key, value); return this; }, /** * Returns all the keys of spectrum parameter list. * @returns {Array} */ getParameterKeys: function() { return this._getAllKeysOfInfoBasedHashProp('parameters'); }, /** * Returns the value of a spectrum annotation. * @param {String} key * @returns {Variant} */ getAnnotation: function(key) { return this._getInfoBasedHashPropValue('annotations', key); }, /** * Set the value of a spectrum annotation. * @param {String} key * @param {Variant} value */ setAnnotation: function(key, value) { this._setInfoBasedHashpropValue('annotations', key, value); return this; }, /** * Returns all the keys of spectrum annotation list. * @returns {Array} */ getAnnotationKeys: function() { return this._getAllKeysOfInfoBasedHashProp('annotations'); }, /* * Create the data object. 
	 * @param variables
	 * @returns {Kekule.Spectroscopy.SpectrumData}
	 */
	/*
	createData: function(variables)
	{
		var result = new Kekule.Spectroscopy.SpectrumData(null, variables);
		this.setPropStoreFieldValue('data', result);
		return result;
	}
	*/
	/**
	 * Returns the recommended external units that can be converted from internal unit for this variable.
	 * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef
	 * @returns {Array} Array of unit objects.
	 */
	getVarAvailableExternalUnitObjs: function(varDef)
	{
		// getInternalUnit may be absent on plain VarDefinition instances, fall back to getUnit
		return Kekule.Spectroscopy.DataValueConverterManager.getAltUnits(varDef, varDef.getInternalUnit? varDef.getInternalUnit(): varDef.getUnit(), null, this);
	},
	/**
	 * Returns the recommended external units that can be converted from internal unit for this variable.
	 * Same lookup as {@link Kekule.Spectroscopy.Spectrum#getVarAvailableExternalUnitObjs},
	 * but mapped to the unit symbol strings.
	 * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef
	 * @returns {Array} Array of unit symbols (string).
	 */
	getVarAvailableExternalUnitSymbols: function(varDef)
	{
		var unitObjs = Kekule.Spectroscopy.DataValueConverterManager.getAltUnits(varDef, varDef.getInternalUnit? varDef.getInternalUnit(): varDef.getUnit(), null, this);
		var result = [];
		for (var i = 0, l = unitObjs.length; i < l; ++i)
		{
			result.push(unitObjs[i].symbol);
		}
		return result;
	},
	/*
	 * Returns all keys in {@link Kekule.Spectroscopy.Spectrum#spectrumParams} property.
	 * @returns {Array}
	 */
	/*
	getSpectrumParamKeys: function()
	{
		return this.getSpectrumParams()? Kekule.ObjUtils.getOwnedFieldNames(this.getSpectrumParams()): [];
	},
	*/
	/*
	 * Get param value from {@link Kekule.Spectroscopy.Spectrum#spectrumParams}.
	 * @param {String} key
	 * @returns {Variant}
	 */
	/*
	getSpectrumParam: function(key)
	{
		return this.getSpectrumParams()? this.getSpectrumParams()[key]: null;
	},
	*/
	/*
	 * Set value of a spectrum param. If key already exists, its value will be overwritten.
	 * @param {String} key
	 * @param {Variant} value
	 */
	/*
	setSpectrumParam: function(key, value)
	{
		this.doGetSpectrumParams(true)[key] = value;
		this.notifyPropSet('spectrumParams', this.getPropStoreFieldValue('spectrumParams'));
	}
	*/
});
// Add standard coord/size support so a spectrum can be positioned and sized
// like other chem objects in an editor.
Kekule.ClassDefineUtils.addStandardCoordSupport(Kekule.Spectroscopy.Spectrum);
Kekule.ClassDefineUtils.addStandardSizeSupport(Kekule.Spectroscopy.Spectrum);

// register spectrum related units
(function(){
	var register = Kekule.Unit.register;
	// IR
	register('transmittance', 'transmittance', 'OpticalTransmittance', 1);  // IT/I0
	register('transmittance%', 'transmittance_percent', 'OpticalTransmittance', 1e-2);  // IT/I0
	register('reflectance', 'reflectance', 'OpticalReflectance', 1);  // IR/I0
	register('absorbance', 'absorbance', 'OpticalAbsorbance', 1);  // log10(IR/I0)
	register('Kubelka Munk', 'Kubelka_Munk', 'OpticalKubelkaMunk', 1);  // (1-R^2)/(2R)
	// MS
	register('counts', 'ms_count', 'Misc', null);
	register('relative abundance', 'ms_relative_abundance', 'SpectrumMS', null);
	register('m/z', 'ms_mass_charge_ratio', 'SpectrumMS', null);
})();
})();
src/spectroscopy/kekule.spectrum.core.js
(function(){ "use strict"; var PS = Class.PropertyScope; var AU = Kekule.ArrayUtils; var KUnit = Kekule.Unit; /** * Base namespace for spectra. * @namespace */ Kekule.Spectroscopy = {}; /** * Enumeration of data mode of spectrum variable. * @enum */ Kekule.Spectroscopy.DataMode = { /** Value points are continuous, e.g. in IR data table. */ CONTINUOUS: 1, /** Value points are discrete, e.g. in MS peak table. */ PEAK: 2 }; /** * Some util methods about spectroscopy. * @class */ Kekule.Spectroscopy.Utils = { /** * Merge two data ranges. * Each item is a hash like {x: {min: minValue, max: maxValue}, y: {min: minValue, max: maxValue}}. * @param {Hash} r1 * @param {Hash} r2 * @returns {Hash} */ mergeDataRange: function(r1, r2) { var result = {}; var vars = AU.clone(Kekule.ObjUtils.getOwnedFieldNames(r1)); AU.pushUnique(vars, Kekule.ObjUtils.getOwnedFieldNames(r2)); for (var i = 0, l = vars.length; i < l; ++i) { var varSymbol =vars[i]; if (!r1[varSymbol]) result[varSymbol] = Object.extend(r2[varSymbol]); else if (!r2[varSymbol]) result[varSymbol] = Object.extend(r1[varSymbol]); else { result[varSymbol] = { 'min': (r1[varSymbol].min < r2[varSymbol].min)? r1[varSymbol].min: r2[varSymbol].min, 'max': (r1[varSymbol].max > r2[varSymbol].max)? r1[varSymbol].max: r2[varSymbol].max } } } return result; }, /** * Returns scale point information for a data range. * @param {Number} dataRangeMin * @param {Number} dataRangeMax * @param {Int} preferredScaleSectionCount * @returns {Hash} */ calcScalePointInfo: function(dataRangeMin, dataRangeMax, preferredScaleSectionCount) { if (preferredScaleSectionCount <= 0) preferredScaleSectionCount = 10; // avoid exception, set a default count value here var digitCounts = [Math.log10(Math.abs(dataRangeMin)), Math.log10(Math.abs(dataRangeMax))]; var digitCountMax = Math.floor(Math.max(digitCounts[0], digitCounts[1])); var digitCountMin = (Math.sign(dataRangeMin) === Math.sign(dataRangeMax))? 
Math.floor(Math.min(digitCounts[0], digitCounts[1], 0)): -Infinity; var useSciForm = (digitCountMax > 6); // need to use sci form if the digit num is very large to compact space var dataDelta = dataRangeMax - dataRangeMin; var deltaBetweenScales = dataDelta / preferredScaleSectionCount; var deltaBetweenScalesDigitCount = Math.max(Math.floor(Math.log10(Math.abs(deltaBetweenScales))), digitCountMin); var scaleBase = Math.pow(10, deltaBetweenScalesDigitCount); var actualDeltaBetweenScales; if (actualDeltaBetweenScales < 10 && dataDelta > 0.5) // major scale should be even number in 1-10 scope { actualDeltaBetweenScales = Math.ceil(actualDeltaBetweenScales / scaleBase / 2) * 2 * scaleBase; } else { actualDeltaBetweenScales = Math.ceil(deltaBetweenScales / scaleBase) * scaleBase; } var scaleFrom = Math.ceil(dataRangeMin / actualDeltaBetweenScales) * actualDeltaBetweenScales; var scaleTo = Math.floor(dataRangeMax / actualDeltaBetweenScales) * actualDeltaBetweenScales; var result = { 'useSciForm': useSciForm, 'scaleFrom': scaleFrom, 'scaleTo': scaleTo, 'scaleSectionCount': Math.round((scaleTo - scaleFrom) / actualDeltaBetweenScales), 'scaleValues': [], 'scaleBase': scaleBase, 'scaleFromOnBase': scaleFrom / scaleBase, 'scaleToOnBase': scaleTo / scaleBase, 'fixDigitsCountAfterPoint': Math.max(-deltaBetweenScalesDigitCount, 0) // record the recommended digits to appear after the decimal point }; for (var i = 0, l = result.scaleSectionCount + 1; i < l; ++i) { result.scaleValues.push(Math.round(i * actualDeltaBetweenScales / scaleBase) * scaleBase + scaleFrom); } //console.log(result, scaleBase); return result; } }; /** * A util object to manage the registered spectrum data value converters. * These converters are used to convert raw spectrum value from one unit to another (e.g., Hz to ppm in NMR). * @class */ Kekule.Spectroscopy.DataValueConverterManager = { /** @private */ _converters: [], /** * Register a converter object. 
* The converter object should implement the following methods: * { * convert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) => newValue, * canConvert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) => Bool, * getAltUnits: function(varDef, fromUnitObj, spectrumDataSection, spectrum) -> array (optional), returns the recommended alternative unitObjs for spectrum * } * @param {Object} converter */ register: function(converter) { DCM._converters.push(converter); }, /** * Unregister a converter. * @param {Object} converter */ unregister: function(converter) { var index = DMC._converters.indexOf(converter); if (index >= 0) DMC._converters.splice(index, 1); }, /** @private */ doConvert: function(value, varDef, fromUnit, toUnit, spectrumDataSection, spectrum) { if (fromUnit === toUnit) return value; if (!Kekule.NumUtils.isNormalNumber(value)) return value; var converters = DCM._converters; if (converters.length) { var fromUnitObj = Kekule.Unit.getUnit(fromUnit); var toUnitObj = Kekule.Unit.getUnit(toUnit); if (fromUnitObj && toUnitObj) { for (var i = converters.length - 1; i >= 0; --i) { var converter = converters[i]; if (converter.canConvert(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum)) return converter.convert(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum); } } } // no available converter found, can not convert Kekule.error(Kekule.$L('ErrorMsg.UNABLE_TO_CONVERT_BETWEEN_UNITS').format(fromUnitObj.getKey(), toUnitObj.getKey())); return null; }, /** @private */ getAltUnits: function(varDef, fromUnit, spectrumDataSection, spectrum) { var result = []; var converters = DCM._converters; if (converters.length) { var fromUnitObj = Kekule.Unit.getUnit(fromUnit); if (fromUnitObj) { for (var i = converters.length - 1; i >= 0; --i) { var converter = converters[i]; var subResult = converter.getAltUnits(varDef, fromUnitObj, spectrumDataSection, spectrum) || []; 
AU.pushUnique(result, subResult); } } } return result; } }; /** @ignore */ var DCM = Kekule.Spectroscopy.DataValueConverterManager; // register the default data value converter DCM.register({ convert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { return fromUnitObj.convertValueTo(value, toUnitObj); }, canConvert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { return fromUnitObj.canConvertValueTo(toUnitObj); }, getAltUnits: function(varDef, fromUnitObj, spectrumDataSection, spectrum) { var category = fromUnitObj.category; return category.getConvertableUnits(); } }); // register a converter to convert between NMR frequency and ppm DCM.register({ convert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { var observeFreq = spectrum.getParameter('observeFrequency'); if (fromUnitObj.category === KUnit.Frequency) // from Hz to ppm { var freq = fromUnitObj.convertValueTo(value, observeFreq.getUnit()); var pureRatio = freq / observeFreq.getValue(); // in ppm * 1e10, in another word, the pure ratio return KUnit.Dimensionless.ONE.convertValueTo(pureRatio, toUnitObj); } else if (fromUnitObj.category === K.Unit.Dimensionless) // from ppm to Hz { var value2 = fromUnitObj.convertValueToStandard(value); var freq = value2 * observeFreq.getValue(); var freqUnit = KUnit.getUnit(observeFreq.getUnit()); return freqUnit.convertValueTo(freq, toUnitObj); } }, canConvert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { if (spectrum.getSpectrumType() === Kekule.Spectroscopy.SpectrumType.NMR) { var observeFreq = spectrum.getParameter('observeFrequency'); if (observeFreq && Kekule.Unit.getUnit(observeFreq.getUnit()).category === Kekule.Unit.Frequency) { return (fromUnitObj.category === Kekule.Unit.Frequency && toUnitObj.category === Kekule.Unit.Dimensionless) || (fromUnitObj.category === Kekule.Unit.Dimensionless && toUnitObj.category === Kekule.Unit.Frequency); 
} } return false; }, getAltUnits: function(varDef, fromUnitObj, spectrumDataSection, spectrum) { var result = []; if (spectrum.getSpectrumType() === Kekule.Spectroscopy.SpectrumType.NMR) { var observeFreq = spectrum.getParameter('observeFrequency'); if (observeFreq && Kekule.Unit.getUnit(observeFreq.getUnit()).category === Kekule.Unit.Frequency) { if (fromUnitObj.category === Kekule.Unit.Frequency) result.push(Kekule.Unit.Dimensionless.PARTS_PER_MILLION); else if (fromUnitObj.category === Kekule.Unit.Dimensionless) result = result.concat(Kekule.Unit.Frequency.getConvertableUnits()); } } return result; } }); // register a converter to convert between IR wave length and wave number DCM.register({ convert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { if (fromUnitObj.category === KUnit.Length) // from wave length to wave number { var standardWaveLengthScalar = fromUnitObj.convertValueToStandardEx(value); var standardWaveNumber = 1 / standardWaveLengthScalar.value; return toUnitObj.convertValueFromStandard(standardWaveNumber); } else if (fromUnitObj.category === KUnit.WaveNumber) // from wave number to wave length { var standardWaveNumberScalar = fromUnitObj.convertValueToStandardEx(value); var standardWaveLength = 1 / standardWaveNumberScalar.value; return toUnitObj.convertValueFromStandard(standardWaveLength); } }, canConvert: function(value, varDef, fromUnitObj, toUnitObj, spectrumDataSection, spectrum) { if (spectrum.getSpectrumType() === Kekule.Spectroscopy.SpectrumType.IR) { return (fromUnitObj.category === Kekule.Unit.Length && toUnitObj.category === Kekule.Unit.WaveNumber) || (fromUnitObj.category === Kekule.Unit.WaveNumber && toUnitObj.category === Kekule.Unit.Length); } return false; }, getAltUnits: function(varDef, fromUnitObj, spectrumDataSection, spectrum) { var result; if (spectrum.getSpectrumType() === Kekule.Spectroscopy.SpectrumType.IR) { if (fromUnitObj.category === Kekule.Unit.Length) result = 
[Kekule.Unit.WaveNumber.RECIPROCAL_CENTIMETER]; else if (fromUnitObj.category === Kekule.Unit.WaveNumber) result = [Kekule.Unit.Length.getConvertableUnits()]; } return result; } }); /** * Variable used in spectrum. * @class * @augments Kekule.VarDefinition * * @property {String} internalUnit Unit that used in internal data storage. * @property {String} externalUnit Unit that used to expose data to public. */ Kekule.Spectroscopy.SpectrumVarDefinition = Class.create(Kekule.VarDefinition, /** @lends Kekule.Spectroscopy.SpectrumVarDefinition# */ { /** @private */ CLASS_NAME: 'Kekule.Spectroscopy.SpectrumVarDefinition', initProperties: function() { this.defineProp('internalUnit', {'dataType': DataType.STRING, 'serializable': false, 'getter': function() { return this.getUnit(); }, 'setter': function(value) { this.setUnit(value); } }); this.defineProp('externalUnit', {'dataType': DataType.STRING}); }, /** * Returns the actual external unit of var. * Usually this function returns the value of {@link Kekule.Spectroscopy.SpectrumVarDefinition.externalUnit} * If it is not set, the result will be the same as internalUnit. * @returns {String} */ getActualExternalUnit: function() { return this.getExternalUnit() || this.getInternalUnit(); }, /** * Whether the external unit setting of this var differs from the internal unit. * @returns {Bool} */ hasDifferentExternalUnit: function() { var externalUnit = this.getExternalUnit(); return !!(externalUnit && externalUnit !== this.getInternalUnit()); } }); /** * Represent part of data in a spectrum. * @class * * @param {String} name * @param {Kekule.Spectroscopy.SpectrumData} parent Parent spectrum data object. * @param {Array} localVariables Array of variable definition objects or symbols. * * @property {Kekule.Spectroscopy.SpectrumData} parent Parent spectrum data object. * @property {Array} localVarInfos Stores the local variable information. Each item is a hash containing fields {'symbol', 'range'(optional)}. 
	 * @property {Array} varSymbols Array of variable symbols such as ['X', 'Y'].
	 * @property {Int} mode Data mode of section, continuous or peak.
	 * @property {Hash} peakRoot
	 * @property {String} name
	 * @property {String} title
	 */
Kekule.Spectroscopy.SpectrumDataSection = Class.create(Kekule.ChemObject,
/** @lends Kekule.Spectroscopy.SpectrumDataSection# */
{
	/** @private */
	CLASS_NAME: 'Kekule.Spectroscopy.SpectrumDataSection',
	/** @private */
	initialize: function(name, parent, localVariables)
	{
		this.setPropStoreFieldValue('name', name);
		this.setPropStoreFieldValue('localVarInfos', []);
		this.setPropStoreFieldValue('dataItems', []);
		this.setPropStoreFieldValue('parent', parent);
		this.tryApplySuper('initialize', []);
		//this.setLocalVarSymbols(localVariables);
		if (localVariables)
			this.setLocalVariables(localVariables);
		// a freshly created section holds no data yet, so it is trivially sorted
		this.setDataSorted(true);
		this._cache = {};  // private cache (e.g. for calculated value ranges)
		//this.setPropStoreFieldValue('variables', variables? AU.clone(variables): []);
	},
	doFinalize: function()
	{
		if (this.getParent() && this.getParent().removeChild)
		{
			// remove item in parent first
			this.getParent().removeChild(this);
		}
		this.clear();
		var variables = this.getVariables();
		for (var i = 0, l = variables.length; i < l; ++i)
		{
			variables[i].finalize();
		}
		this.setPropStoreFieldValue('localVarInfos', null);
		this.tryApplySuper('doFinalize');
	},
	/** @private */
	initProperties: function()
	{
		// NOTE(review): 'Kekule.MapEx' looks odd for a parent that is documented as
		// a SpectrumData object - confirm the intended dataType string.
		this.defineProp('parent', {'dataType': 'Kekule.MapEx', 'setter': null, 'serializable': false});
		this.defineProp('name', {'dataType': DataType.STRING});
		this.defineProp('title', {'dataType': DataType.STRING});
		/*
		this.defineProp('variables', {'dataType': DataType.ARRAY});
		this.defineProp('varSymbols', {'dataType': DataType.ARRAY, 'setter': null, 'scope': PS.PRIVATE,
			'getter': function() {
				var result = [];
				var list = this.getVariables();
				for (var j = 0, jj = list.length; j < jj; ++j)
				{
					var varDef = list[j];
					result.push(varDef.getSymbol());
				}
				return result;
			}});
		*/
		this.defineProp('localVarInfos', {'dataType': DataType.ARRAY, 'setter': null});
		// localVarSymbols is a computed view over the actual local var infos
		this.defineProp('localVarSymbols', {'dataType': DataType.ARRAY, 'scope': PS.PRIVATE, 'serializable': false,
			'getter': function() {
				var result = [];
				var list = this.getActualLocalVarInfos();
				if (list && list.length)
				{
					for (var j = 0, jj = list.length; j < jj; ++j)
					{
						var info = list[j];
						//result.push(info.varDef.getSymbol());
						result.push(info.symbol);
					}
				}
				/*
				else  // localVarInfos is not initialized yet, read from the storage
					result = this.getPropStoreFieldValue('localVarSymbols');
				*/
				return result;
			},
			'setter': function(value) {
				var v = value || [];
				//this.setPropStoreFieldValue('localVarSymbols', v);
				this._updateLocalVarInfosFromSymbols(v);
			}
		});
		this.defineProp('mode', {'dataType': DataType.INT, 'enumSource': Kekule.Spectroscopy.DataMode,
			'setter': function(value) {
				if (this.getMode() !== value)
				{
					//console.log('set mode', value);
					this.setPropStoreFieldValue('mode', value);
					// data interpretation differs between modes, so notify listeners
					this.notifyDataChange();
				}
			}
		});
		// NOTE(review): "DataType.Hash" (mixed case) is inconsistent with the
		// upper-case DataType constants used elsewhere in this file (STRING,
		// ARRAY, INT) and may be undefined - confirm against the DataType declaration.
		this.defineProp('defPeakRoot', {'dataType': DataType.Hash});
		// private, stores the data items, each item is a hash, e.g. {x: 1, y: 10, w: 2}
		this.defineProp('dataItems', {'dataType': DataType.ARRAY, 'setter': null, 'scope': PS.PRIVATE});
	},
	/** @ignore */
	initPropValues: function()
	{
		this.tryApplySuper('initPropValues');
		this.setMode(Kekule.Spectroscopy.DataMode.CONTINUOUS);
	},
	// custom save / load method
	/** @ignore */
	doSaveProp: function(obj, prop, storageNode, serializer)
	{
		if (!prop.serializable)
			return;
		var propName = prop.name;
		if (propName === 'dataItems')
		{
			var node = serializer.createChildStorageNode(storageNode, serializer.propNameToStorageName('dataItems'), false);
			var subNode = serializer.createChildStorageNode(node, serializer.propNameToStorageName('values'), true);  // create sub node for array
			serializer.save(obj.getDataItems(), subNode);  // save array values in this sub node
			// extract all extra info of data array and save them
			var extraInfos = obj._extractAllExtraInfoOfDataItems();
			if (extraInfos.length)
			{
				var subNode = serializer.createChildStorageNode(node, serializer.propNameToStorageName('extras'), true);
				serializer.save(extraInfos, subNode);
			}
			return true;  // this property is handled, do not use default save method
		}
		else
			return false;  // use the default method
	},
	/** @ignore */
	doLoadProp: function(obj, prop, storageNode, serializer)
	{
		if (!prop.serializable)
			return;
		var propName = prop.name;
		if (propName === 'dataItems')
		{
			var items = [];
			var node = serializer.getChildStorageNode(storageNode, serializer.propNameToStorageName('dataItems'));
			var subNode = serializer.getChildStorageNode(node, serializer.propNameToStorageName('values'));  // get sub node for array
			serializer.load(items, subNode);
			obj.setPropStoreFieldValue('dataItems', items);
			// then the extra info
			var subNode = serializer.getChildStorageNode(node, serializer.propNameToStorageName('extras'));
			if (subNode)
			{
				var extras = [];
				serializer.load(extras, subNode);
				obj._writeExtraInfoOfDataItems(extras);
			}
			return true;
		}
		else
			return false;  // use the default method
	},
	/** @private */
_extractAllExtraInfoOfDataItems: function() { var result = []; for (var i = 0, l = this.getDataCount(); i < l; ++i) { var info = this.getExtraInfoAt(i); if (info) result.push({'index': i, 'info': info}); } return result; }, /** @private */ _writeExtraInfoOfDataItems: function(extras) { for (var i = 0, l = extras.length; i < l; ++i) { var info = extras[i]; this.setExtraInfoAt(info.index, info.info); } }, /* @ignore */ /* parentChanged: function(newParent, oldParent) { //console.log('parent changed', newParent && newParent.getClassName(), oldParent); var result = this.tryApplySuper('parentChanged', newParent, oldParent); // after changing of parent the local var info may be changed as well this._updateLocalVarInfosFromSymbols(this.getLocalVarSymbols()); return result; }, */ /** * Returns whether this data section containing the peak data. * @returns {Bool} */ isPeakSection: function() { return this.getMode() === Kekule.Spectroscopy.DataMode.PEAK; }, /** * Returns the actual parent SpectrumData object. * @returns {Kekule.Spectroscopy.Spectrum} * @private */ getParentSpectrum: function() { var p = this.getParent(); while (p && !(p instanceof Kekule.Spectroscopy.Spectrum) && p.getParent) { p = p.getParent(); } return p; }, /** * Returns the variable definition of parent spectrum data. * @returns {Array} */ getParentVariables: function() { var parent = this.getParentSpectrum(); return (parent && parent.getVariables()) || []; }, /** * Returns the actual local variable infos. * User should use this method rather than ref to localVarInfos property. 
* @returns {Array} */ getActualLocalVarInfos: function() { var result = AU.clone(this.getLocalVarInfos()); if (!result || !result.length) // inherit all from parent spectrum { var vars = this.getParentVariables(); for (var i = 0, l = vars.length; i < l; ++i) { result.push({'symbol': vars[i].symbol}); } } return result; }, /** @private */ _updateLocalVarInfosFromSymbols: function(varSymbols, silent) { var v = varSymbols || []; var infos = []; var parent = this.getParentSpectrum(); for (var i = 0, l = v.length; i < l; ++i) { var item = v[i]; this._pushLocalVariable(parent, item, infos); } //console.log('update local var infos', varSymbols, infos, parent); this.setPropStoreFieldValue('localVarInfos', infos); //this.setLocalVarInfos(infos); this.notifyPropSet('localVarInfos', infos, silent); }, /** @private */ _pushLocalVariable: function(parent, varSymbol, targetArray) { if (!targetArray) targetArray = this.getLocalVarInfos(); //var parent = this.getParent(); if (parent && parent.getVariable) { var varDef = parent.getVariable(varSymbol); if (varDef) { targetArray.push({/*'varDef': varDef,*/ 'symbol': varSymbol}); } } }, /** * Set the local variable symbols or definitions. * @param {Array} variables Array of var defintion or symbols. */ setLocalVariables: function(variables) { var localVar; var varDefs = [], varSymbols = []; for (var i = 0, l = variables.length; i < l; ++i) { localVar = variables[i]; if (typeof(localVar) === 'string') // a var symbol { varSymbols.push(localVar); } else // var definition { varDefs.push(localVar); } } if (varDefs.length) { this.setPropStoreFieldValue('localVarInfos', varDefs); this.notifyPropSet('localVarInfos', varDefs); } else if (varSymbols.length) { this._updateLocalVarInfosFromSymbols(varSymbols); } }, /** * Returns the local variable information index of variable. 
* @param {Variant} varIndexOrNameOrDef * @returns {Int} */ getLocalVarInfoIndex: function(varIndexOrNameOrDef) { var result = -1; var localVarInfos = this.getActualLocalVarInfos(); if (typeof (varIndexOrNameOrDef) === 'number') result = varIndexOrNameOrDef; else // if (varIndexOrNameOrDef instanceof Kekule.Spectroscopy.SpectrumVarDefinition) { var symbol = varIndexOrNameOrDef.getSymbol? varIndexOrNameOrDef.getSymbol(): varIndexOrNameOrDef; for (var i = 0, l = localVarInfos.length; i < l; ++i) { /* var varDef = localVarInfos[i].varDef; if (varDef === varIndexOrNameOrDef || varDef.getSymbol() === varIndexOrNameOrDef) { result = i; break; } */ if (symbol === localVarInfos[i].symbol) { result = i; break; } } } return result; }, /** * Returns the local information of variable. * @param {Variant} varIndexOrNameOrDef * @returns {Hash} */ getLocalVarInfo: function(varIndexOrNameOrDef) { var index = this.getLocalVarInfoIndex(varIndexOrNameOrDef); var result = (index >= 0)? this.getActualLocalVarInfos()[index]: null; /* if (result) { var parent = this.getParentSpectrum(); if (parent) { var symbol = result.symbol; result = Object.create(result); // avoid affect the original hash object result.varDef = parent.getVariable(symbol); } } */ return result; /* var result; var localVarInfos = this.getActualLocalVarInfos(); if (typeof (varIndexOrNameOrDef) === 'number') result = localVarInfos[varIndexOrNameOrDef]; else // if (varIndexOrNameOrDef instanceof Kekule.Spectroscopy.SpectrumVarDefinition) { for (var i = 0, l = localVarInfos.length; i < l; ++i) { var varDef = localVarInfos[i].varDef; if (varDef === varIndexOrNameOrDef || varDef.getSymbol() === varIndexOrNameOrDef) { result = localVarInfos[i]; break; } } } return result; */ }, /** * Returns the local information value of a variable. 
* @param {Variant} varIndexOrNameOrDef * @param {String} key * @returns {Variant} */ getLocalVarInfoValue: function(varIndexOrNameOrDef, key) { var info = this.getLocalVarInfo(varIndexOrNameOrDef); return info && info[key]; }, /** * Set a local information of variable. * @param {Variant} varIndexOrNameOrDef * @param {String} key * @param {Variant} value */ setLocalVarInfoValue: function(varIndexOrNameOrDef, key, value) { var info = this.getLocalVarInfo(varIndexOrNameOrDef); info[key] = value; }, /** * Returns the variable definition of a local variable. * @param {Variant} varIndexOrNameOrDef * @returns {Kekule.Spectroscopy.SpectrumVarDefinition} */ getLocalVarDef: function(varIndexOrNameOrDef) { //return this.getLocalVarInfoValue(varIndexOrNameOrDef, 'varDef'); var symbol = this.getLocalVarInfoValue(varIndexOrNameOrDef, 'symbol'); var parent = this.getParentSpectrum(); return parent && parent.getVariable(symbol); }, /** * Returns the local variable info of certain dependency. * @param {Int} dependency * @returns {Array} */ getLocalVarInfoOfDependency: function(dependency) { var result = []; var localVarInfos = this.getActualLocalVarInfos(); for (var i = 0, l = localVarInfos.length; i < l; ++i) { var varDef = this.getLocalVarDef(i); if (varDef.getDependency() === dependency) { var info = Object.extend({}, localVarInfos[i]); info.varDef = varDef; result.push(info); } } return result; }, /** * Returns the from/to value of a continuous variable. 
* @param {Variant} varNameOrIndexOrDef * @returns {Hash} Hash of {fromValue, toValue} */ getContinuousVarRange: function(varIndexOrNameOrDef) { var parent = this.getParent(); var varInfo = this.getLocalVarInfo(varIndexOrNameOrDef); return varInfo.continuousRange || (parent && parent.getContinuousVarRange && parent.getContinuousVarRange(varInfo.symbol)); /* var result = this.getLocalVarInfoValue(varIndexOrNameOrDef, 'continuousRange'); if (!result) { var parent = this.getParent(); result = parent && parent.getContinuousVarRange(varInfo.varDef); } return result; */ }, /** * Set the from/to value of a variable and mark it as a continuous one. * @param {Variant} varNameOrIndexOrDef * @param {Number} fromValue * @param {Number} toValue */ setContinuousVarRange: function(varIndexOrNameOrDef, fromValue, toValue) { /* var varInfo = this.getLocalVarInfo(varIndexOrNameOrDef); varInfo.range = {'fromValue': fromValue, 'toValue': toValue}; */ this.setLocalVarInfoValue(varIndexOrNameOrDef, 'continuousRange', {'fromValue': fromValue, 'toValue': toValue}); return this; }, /** * Remove the continuous information of a variable. * @param {Variant} varIndexOrNameOrDef */ clearContinuousVarRange: function(varIndexOrNameOrDef) { /* var varInfo = this.getLocalVarInfo(varIndexOrNameOrDef); varInfo.range = null; */ this.setLocalVarInfoValue(varIndexOrNameOrDef, 'continuousRange', null); return this; }, /** * Set the local default value of a variable when the concrete value in spectrum is absent. * @param {Variant} varIndexOrNameOrDef * @param {Number} value */ setDefaultVarValue: function (varIndexOrNameOrDef, value) { this.setLocalVarInfoValue(varIndexOrNameOrDef, 'defaultValue', value); return this; }, /** * Clear the local default value of a variable. 
* @param {Variant} varIndexOrNameOrDef */ clearDefaultVarValue: function(varIndexOrNameOrDef) { return this.setDefaultVarValue(varIndexOrNameOrDef, null); }, /** * Get the local default value of a variable when the concrete value in spectrum is absent. * @param {Variant} varIndexOrNameOrDef * @returns {Number} */ getDefaultVarValue: function(varIndexOfNameOrDef) { var result = this.getLocalVarInfoValue(varIndexOfNameOrDef, 'defaultValue'); if (Kekule.ObjUtils.isUnset(result)) { var varInfo = this.getLocalVarInfo(varIndexOfNameOrDef); var parent = this.getParent(); result = parent && parent.getDefaultVarValue(varInfo.symbol); } return result; }, /** * Returns the range when displaying spectrum of a variable. * @param {Variant} varNameOrIndexOrDef * @param {Hash} options May include fields: * { * autoCalc: Bool. If true, when explicit display range is not set, the number range of variable will be calculated and returned. * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} Hash of {min, max} */ getVarDisplayRange: function(varIndexOrNameOrDef, options) { var op = options || {}; //var varDef = this.getVar var varIndex = this.getLocalVarInfoIndex(varIndexOrNameOrDef); var info = this.getLocalVarInfo(varIndex); var result = info.displayRange? 
Object.extend({}, info.displayRange): null; // avoid affect the original values if (!result) // check the var definition { //var varDef = info.varDef; var varDef = this.getLocalVarDef(varIndex); var varDefRange = varDef.getInfoValue('displayRange'); if (varDefRange) result = Object.extend({}, varDefRange); // avoid affecting the original values } if (!result && op.autoCalc) result = this.calcDataRange(varIndex, {basedOnInternalUnit: true})[info.symbol]; // get range with internal unit first //result = this.calcDataRange(varIndexOrNameOrDef)[info.varDef.getSymbol()]; // do not forget to do unit conversion if necessary if (!op.basedOnInternalUnit) { result = this._convertDataRangeToExternalUnit(result, varIndex); /* var fieldNames = Kekule.ObjUtils.getOwnedFieldNames(result); for (var i = 0, l = fieldNames.length; i < l; ++i) { var fname = fieldNames[i]; //result[fname] = this._convertVarValueToExternal(result[fname], varIndex); } // after conversion, the min/max values may be reversed if (result && result.min > result.max) { var temp = result.min; result.min = result.max; result.max = temp; } */ } return result; }, /** * Set the range when displaying spectrum of a variable. * @param {Variant} varNameOrIndexOrDef * @param {Number} minValue * @param {Number} maxValue * @param {Hash} options Extra options, may include fields: * { * basedOnExternalUnit: Bool * } */ setVarDisplayRange: function(varIndexOrNameOrDef, minValue, maxValue, options) { var op = options || {}; var range = {'min': minValue, 'max': maxValue}; if (op.basedOnExternalUnit) // need to convert values to internal unit first { var varIndex = this.getLocalVarInfoIndex(varIndexOrNameOrDef); range = this._convertDataRangeToInternalUnit(range, varIndex); } this.setLocalVarInfoValue(varIndexOrNameOrDef, 'displayRange', range); return this; }, /** * Remove the display range information of a variable. 
* @param {Variant} varIndexOrNameOrDef */ clearVarDisplayRange: function(varIndexOrNameOrDef) { this.setLocalVarInfoValue(varIndexOrNameOrDef, 'displayRange',null); return this; }, /** * Returns display range of variables. * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be considered. * @param {Hash} options May include fields: * { * autoCalc: Bool. If true, when explicit display range is not set, the number range of variable will be calculated and returned. * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} */ getDisplayRangeOfVars: function(targetVariables, options) { var result = {}; if (!targetVariables) targetVariables = this.getLocalVarSymbols(); for (var i = 0, l = targetVariables.length; i < l; ++i) { var symbol = this._varToVarSymbol(targetVariables[i]); result[symbol] = this.getVarDisplayRange(targetVariables[i], options); } return result; }, /** @private */ _varToVarSymbol: function(targetVar) { /* var info = this.getLocalVarInfo(targetVar); if (info) return info.varDef.getSymbol(); */ var varDef = this.getLocalVarDef(targetVar); if (varDef) return varDef.getSymbol(); else return null; }, /** @private */ _varToVarSymbols: function(targetVariables) { var targetVarSymbols = []; var vars = targetVariables? 
AU.toArray(targetVariables): null; if (!vars) targetVarSymbols = this.getLocalVarSymbols(); else { for (var i = 0, l = vars.length; i < l; ++i) { targetVarSymbols.push(this._varToVarSymbol(vars[i])) } } return targetVarSymbols; }, /** @private */ _getDefaultPeakRoot: function() { var result = {}; var varInfos = this.getActualLocalVarInfos(); for (var i = 0, l = varInfos.length; i < l; ++i) { //var varDef = varInfos[i].varDef; var varDef = this.getLocalVarDef(i); if (varDef.getDependency() !== Kekule.VarDependency.INDEPENDENT) { result[varDef.getSymbol()] = 0; } } return result; }, /** * Iterate all data items and calculate the min/max value of each variable. * Note this function will always returns the value based on internal unit, * regardless of whether the external unit is set or not. * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @param {Hash} options Extra calculation options, may include fields: * { * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * ignorePeakRoot: Bool. If true, the peak root value will be ignored during calculation. 
* } * @returns {Hash} */ calcDataRange: function(targetVariables, options) { var op = options || {}; // since calculation of data range is a time-consuming job, here we cache the result var targetVarSymbols = this._varToVarSymbols(targetVariables); var notNum = function (v) { return !Kekule.NumUtils.isNormalNumber(v); }; var ranges = {}; var rangeCache = this._cache.ranges; if (!rangeCache) { rangeCache = {}; this._cache.ranges = rangeCache; } var remainingVarSymbols = []; for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = targetVarSymbols[i]; if (rangeCache[symbol]) // cached { // console.log('got range from cache', symbol); ranges[symbol] = Object.extend({}, rangeCache[symbol]); } else remainingVarSymbols.push(symbol); } if (remainingVarSymbols.length) { var self = this; var isPeakData = this.isPeakSection(); this.forEach(function (dataValue, index) { for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { var symbol = remainingVarSymbols[i]; if (notNum(dataValue[symbol])) continue; if (!ranges[symbol]) ranges[symbol] = {}; ranges[symbol].min = notNum(ranges[symbol].min) ? dataValue[symbol] : Math.min(ranges[symbol].min, dataValue[symbol]); ranges[symbol].max = notNum(ranges[symbol].max) ? dataValue[symbol] : Math.max(ranges[symbol].max, dataValue[symbol]); // consider peak root value if (isPeakData && !op.ignorePeakRoot) { var peakRootValue = self.getPeakRootValueOf(dataValue); if (peakRootValue && !notNum(peakRootValue[symbol])) { ranges[symbol].min = notNum(ranges[symbol].min) ? peakRootValue[symbol] : Math.min(ranges[symbol].min, peakRootValue[symbol]); ranges[symbol].max = notNum(ranges[symbol].max) ? 
peakRootValue[symbol] : Math.max(ranges[symbol].max, peakRootValue[symbol]); } } } }, null, {basedOnInternalUnit: true}); // here we use the internal unit, to keep the cache with the same unit // cache the range values for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { var symbol = remainingVarSymbols[i]; rangeCache[symbol] = Object.extend({}, ranges[symbol]); } } /* if (this.getMode() === Kekule.Spectroscopy.DataMode.PEAK) // consider the peak root { var peakRoot = this.getDefPeakRoot() || this._getDefaultPeakRoot(); for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = targetVarSymbols[i]; var rootValue = peakRoot[symbol]; if (!notNum(rootValue)) { ranges[symbol].min = Math.min(ranges[symbol].min, rootValue); ranges[symbol].max = Math.max(ranges[symbol].max, rootValue); } } } */ //console.log(this.getMode(), peakRoot, ranges); if (!op.basedOnInternalUnit) { for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = targetVarSymbols[i]; ranges[symbol] = this._convertDataRangeToExternalUnit(ranges[symbol], i); } } return ranges; }, /** @private */ _convertDataRangeToExternalUnit: function(range, varIndex) { if (!range) return range; var fieldNames = ['min', 'max']; for (var i = 0, l = fieldNames.length; i < l; ++i) { var fname = fieldNames[i]; range[fname] = this._convertVarValueToExternal(range[fname], varIndex); } // after conversion, the min/max values may be reversed if (range.min > range.max) { var temp = range.min; range.min = range.max; range.max = temp; } return range; }, /** @private */ _convertDataRangeToInternalUnit: function(range, varIndex) { if (!range) return range; var fieldNames = ['min', 'max']; for (var i = 0, l = fieldNames.length; i < l; ++i) { var fname = fieldNames[i]; range[fname] = this._convertVarValueToInternal(range[fname], varIndex); } // after conversion, the min/max values may be reversed if (range.min > range.max) { var temp = range.min; range.min = range.max; range.max = temp; } return 
range; }, /** * Iterate all data items and calculate the average value of each variable. * Note this function will always returns the value based on internal unit, * regardless of whether the external unit is set or not. * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @param {Hash} options Extra calculation options, may include fields: * { * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} */ calcDataAverage: function(targetVariables, options) { var op = options || {}; var targetVarSymbols = this._varToVarSymbols(targetVariables); var averages = {}; var averageCache = this._cache.averages; var notNum = function (v) { return !Kekule.NumUtils.isNormalNumber(v); }; if (!averageCache) { averageCache = {}; this._cache.averages = averageCache; } var remainingVarSymbols = []; for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = targetVarSymbols[i]; if (!notNum(averageCache[symbol])) // cached { averages[symbol] = averageCache[symbol]; } else remainingVarSymbols.push(symbol); } if (remainingVarSymbols.length) { var sums = {}; var counts = {}; for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { sums[remainingVarSymbols[i]] = 0; counts[remainingVarSymbols[i]] = 0; } this.forEach(function (dataValue, index) { for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { var symbol = remainingVarSymbols[i]; var value = dataValue[symbol]; if (notNum(value)) continue; sums[symbol] += value; ++counts[symbol]; } }, null, {basedOnInternalUnit: true}); // cache the average values for (var i = 0, l = remainingVarSymbols.length; i < l; ++i) { var symbol = remainingVarSymbols[i]; averages[symbol] = sums[symbol] / counts[symbol]; averageCache[symbol] = averages[symbol]; } } if (!op.basedOnInternalUnit) { for (var i = 0, l = targetVarSymbols.length; i < l; ++i) { var symbol = 
targetVarSymbols[i] averages[symbol] = this._convertVarValueToExternal(averages[symbol, i]); } } return averages; }, /** * Returns the symbols of continuous variable. * @returns {Array} */ getContinuousVarSymbols: function() { var result = []; var varInfos = this.getActualLocalVarInfos(); for (var i = 0, l = varInfos.length; i < l; ++i) { if (this.getContinuousVarRange(i)) result.push(varInfos[i].symbol); } return result; }, /** @private */ _itemHashToArray: function(hashValue) { if (!hashValue) return null; var result = []; var symbols = this.getLocalVarSymbols(); for (var i = 0, l = symbols.length; i < l; ++i) { result.push(hashValue[symbols[i]]); } // then the extra fields if (hashValue._extra) result._extra = hashValue._extra; else { // then the remaining fields of hashValue, storing in _extra field of array item var remainingFields = AU.exclude(Kekule.ObjUtils.getOwnedFieldNames(hashValue, false), symbols); if (remainingFields.length) result._extra = {}; for (var i = 0, l = remainingFields.length; i < l; ++i) { result._extra[remainingFields[i]] = hashValue[remainingFields[i]]; } } return result; }, /** @private */ _itemArrayToHash: function(arrayValue, options) { if (!arrayValue) return null; var result = {}; var symbols = this.getLocalVarSymbols(); for (var i = 0, l = Math.min(symbols.length, arrayValue.length); i < l; ++i) { var value; if (!options.basedOnInternalUnit) value = this._convertVarValueToExternal(arrayValue[i], i); else value = arrayValue[i]; result[symbols[i]] = value; }// if (arrayValue._extra) { //result = Object.extend(result, arrayValue._extra); result._extra = arrayValue._extra; } return result; }, /** @private */ _convertVarValueToNewUnit: function(value, varDef, fromUnit, toUnit) { if (!Kekule.NumUtils.isNormalNumber(value)) // not a number, usually can not be converted return value; //return Kekule.UnitUtils.convertValue(value, fromUnit, toUnit); return Kekule.Spectroscopy.DataValueConverterManager.doConvert(value, varDef, fromUnit, 
toUnit, this, this.getParent()); }, /** * Convert a raw value (storaged value) to the one exposed to external with a different unit. * @param {Number} value * @param {Int} varIndex * @returns {Number} value * @private */ _convertVarValueToExternal: function(value, varIndex) { var result = value; var varDef = this.getLocalVarDef(varIndex); if (varDef && varDef.hasDifferentExternalUnit && varDef.hasDifferentExternalUnit()) // need to do a value conversion { result = this._convertVarValueToNewUnit(value, varDef, varDef.getInternalUnit(), varDef.getActualExternalUnit()); } return result; }, /** * Convert a value with external unit to the one with internal unit. * @param {Number} value * @param {Int} varIndex * @returns {Number} value * @private */ _convertVarValueToInternal: function(value, varIndex) { var result = value; var varDef = this.getLocalVarDef(varIndex); if (varDef && varDef.hasDifferentExternalUnit && varDef.hasDifferentExternalUnit()) // need to do a value conversion { result = this._convertVarValueToNewUnit(value, varDef, varDef.getActualExternalUnit(), varDef.getInternalUnit()); } return result; }, /** * Returns whether data in this section has been sorted. * @returns {Bool} */ isDataSorted: function() { return this._sorted || this.getDataCount() <= 1; }, /** * Manually set the sorted state of data. * @param {Bool} value */ setDataSorted: function(value) { this._sorted = !!value; return this; }, /** * Sort all data items. * @param {Func} func Optional, func(hash1, hash2). If not set, data items will be sorted by default method. */ sort: function(func) { if (this.isDataSorted()) return; var self = this; var sortFunc = func? function(a1, a2) { return func(self._itemArrayToHash(a1), self._itemArrayToHash(a2)); }: function(a1, a2) { return AU.compare(a1, a2); } this.getDataItems().sort(sortFunc); this.setDataSorted(true); return this; }, /** * Returns the count of data items. 
* @returns {Int} */ getDataCount: function() { return this.getDataItems().length; }, /** @private */ clearCache: function() { this._cache = {}; }, /** * Notify the data of this section has been changed. * @private */ notifyDataChange: function() { var items = this.getDataItems(); this.setDataSorted(false); this.clearCache(); this.notifyPropSet('dataItems', items); this.invokeEvent('dataChange', {'data': items}) }, /** * Clear all data items. */ clear: function() { this.setDataItems([]); this.notifyDataChange(); this.setDataSorted(true); // empty data are always sorted }, /** * Add new data item. The item is can be a hash or an array. * If it is a hash, the hash fields must matches {@link Kekule.Spectroscopy.SpectrumData.independentVars} and {@link Kekule.Spectroscopy.SpectrumData.dependentVars}. * If it is an array, the values in array will automatically mapped to independent and dependent vars. * @param {Variant} item */ appendData: function(item) { var d; if (!DataType.isArrayValue(item)) // is hash value, convert it to array first d = this._itemHashToArray(item); else d = item; if (d) { var items = this.getDataItems(); items.push(d); this.notifyDataChange(); return d; } }, /** * Remove a data item. * @param {Array} item */ removeData: function(item) { var items = this.getDataItems(); var index = items.indexOf(item); return this.removeDataItemAt(index); }, /** * Remove a data item at index. * @param {Int} index */ removeDataAt: function(index) { var result = this.getDataItems().splice(index, 1); this.notifyDataChange(); return result; }, /** * Get the data value at index. * @param {Int} index * @returns {Array} The arrayed form of value. 
*/ getRawValueAt: function(index) { var rawValue = this.getDataItems()[index]; if (rawValue) { var result = AU.clone(rawValue); var isContinousData = this.getMode() === Kekule.Spectroscopy.DataMode.CONTINUOUS; //if (this.getMode() === Kekule.Spectroscopy.DataMode.CONTINUOUS) { var dataIntervalCount = this.getDataCount() - 1; // check if there are omitted values for (var i = 0, l = result.length; i < l; ++i) { var v = result[i]; if (DataType.isUndefinedValue(v) || DataType.isNullValue(v)) // maybe omitted? check if it is a continous variable or it has a default value { var defValue = this.getDefaultVarValue(i); if (Kekule.ObjUtils.notUnset(defValue)) v = defValue; else if (isContinousData) { var range = this.getContinuousVarRange(i); if (range) { v = (dataIntervalCount > 1)? ((index / dataIntervalCount) * (range.toValue - range.fromValue) + range.fromValue): range.fromValue; //console.log('adjusted v', v, range); } } } result[i] = v; } } if (rawValue._extra) // copy the extra properties result._extra = rawValue._extra; return result; } else return null; }, /** @private */ getHashValueAt: function(index, options) { return this._itemArrayToHash(this.getRawValueAt(index), options || {}); }, /** * Get the data value at index. * @param {Int} index * @returns {Hash} The hashed form of value. */ getValueAt: function(index, options) { return this.getHashValueAt(index, options); }, /** * Set the data value at index. * @param {Int} index * @param {Array} The arrayed form of value. */ setRawValueAt: function(index, value) { this.getDataItems()[index] = value; return this; }, /** @private */ setHashValueAt: function(index, value, options) { var aValue = this._itemHashToArray(value); this.setRawValueAt(index, aValue); return this; }, /** * Set the data value at index. * @param {Int} index * @param {Variant} value Value in hash or array form. 
*/ setValueAt: function(index, value, options) { var d; if (!DataType.isArrayValue(value)) // is hash value, convert it to array first d = this._itemHashToArray(value); else d = value; this.setRawValueAt(index, d); return this; }, /** * Get the extra information of a data value. * @param {Variant} value Data value in hash or array form. * @returns {Hash} */ getExtraInfoOf: function(value) { return value._extra; }, /** * Set the extra information of a data value. * @param {Variant} value Data value in hash or array form. * @param {Hash} info */ setExtraInfoOf: function(value, info) { value._extra = info; return this; }, /** * Get the extra information of data value at index. * @param {Int} index * @returns {Hash} */ getExtraInfoAt: function(index) { var d = this.getDataItems()[index]; return d && d._extra; }, /** * Set the extra information of data value at index. * @param {Int} index * @param {Hash} info */ setExtraInfoAt: function(index, info) { var d = this.getDataItems()[index]; d._extra = info; return this; }, /** * Returns the peak root value of data item value. * @param {Hash} value * @returns {Hash} */ getPeakRootValueOf: function(value) { if (this.getMode() !== Kekule.Spectroscopy.DataMode.PEAK) return null; else { var pr = this.getDefPeakRoot() || this._getDefaultPeakRoot(); return Object.extend(Object.extend({}, value), pr); } }, /** * Returns the peak root value of data item at index. * @param {Int} index * @returns {Hash} */ getPeakRootValueAt: function(index) { return this.getPeakRootValueOf(this.getValueAt(index)); }, /** * Calculate values of dependant variable values from independent variable values. * @param {Hash} independentValues * @param {Hash} extraOptions * @returns {Hash} */ getDependentValues: function(independentValues, extraOptions) { return this.doGetDependentValues(independantValues, extraOptions); }, /** * Do actual work of {@link Kekule.Spectroscopy.SpectrumData.getDependentValues}. * Descendants should override this method. 
* @param {Hash} independentValues * @param {Hash} extraOptions * @returns {Hash} * @private */ doGetDependentValues: function(independentValues, extraOptions) { return {}; }, /** * Returns an iterator to iterate all data in this object. * If iterator is not available, null should be returned. * Otherwise, the return value should be an object with method next(): {done, value}. * @returns {Object} */ getIterator: function() { return this.doGetIterator(); }, /** * Do actual work of {@link Kekule.Spectroscopy.SpectrumData.getIterator}. * Desendants may override this method. * @returns {Object} * @private */ doGetIterator: function() { var dataItems = this.getDataItems(); var self = this; var result = { index: 0, next: function(options) { if (this.index >= dataItems.length) return {'done': true}; else { var ret = {'done': false, 'value': /*self._itemArrayToHash(dataItems[this.index])*/self.getHashValueAt(this.index, options)}; ++this.index; return ret; } } }; return result; }, /** * Call function to each data item. * @param {Func} func With params: (hashValue [, index, options]). */ forEach: function(func, thisArg, options) { var iterator = this.getIterator(); if (iterator) { //var dataItems = this.getDataItems(); var index = 0; var nextItem = iterator.next(options); while (!nextItem.done) { func.apply(thisArg, [nextItem.value, index]); ++index; nextItem = iterator.next(); } } return this; } }); /** * The base spectrum data class. * The concrete data can be stored in different forms, implemented in different descendant classes. * @class * @augments ObjectEx * * @param {String} id * @param {Array} variables Array of variables of data, each item is {@link Kekule.Spectroscopy.SpectrumVarDefinition}. * * @property {Array} variables Array of variables of data, each item is {@link Kekule.Spectroscopy.SpectrumVarDefinition}. * @property {Kekule.ChemObjList} sections Child data sections. 
* @property {Kekule.Spectroscopy.SpectrumData} activeSection Active data section to read/write data. * @property {Bool} autoCreateSection Whether create a initial data section automatically when inserting data. */ Kekule.Spectroscopy.SpectrumData = Class.create(ObjectEx, /** @lends Kekule.Spectroscopy.SpectrumData# */ { /** @private */ CLASS_NAME: 'Kekule.Spectroscopy.SpectrumData', /** @private */ initialize: function (id, variables, parent) { //this.setPropStoreFieldValue('dataItems', []); this.tryApplySuper('initialize', [id]); this.setPropStoreFieldValue('variables', variables ? AU.clone(variables) : []); var sections = new Kekule.ChemObjList(null, Kekule.Spectroscopy.SpectrumDataSection, true); this.setPropStoreFieldValue('sections', sections); this.setParent(parent); sections.setOwner(this.getOwner()); //this.createSection(this.getVariables()); // create a default section }, doFinalize: function () { //this.clear(); this.getSections().finalize(); var variables = this.getVariables() || []; for (var i = 0, l = variables.length; i < l; ++i) { variables[i].finalize(); } this.setPropStoreFieldValue('variables', null); this.tryApplySuper('doFinalize'); }, /** @private */ initProperties: function () { this.defineProp('owner', { 'dataType': 'Kekule.ChemSpace', 'serializable': false, 'scope': Class.PropertyScope.PUBLIC, 'getter': function() { return this.getPropStoreFieldValue('owner') || this._getDefaultOwner(); }, 'setter': function(value) { if (value !== this.getPropStoreFieldValue('owner')) { this.setPropStoreFieldValue('owner', value); var newOwner = this.getOwner(); var sections = this.getSections(); if (sections) sections.setOwner(newOwner); } } }); this.defineProp('parent', { 'dataType': 'Kekule.Spectroscopy.Spectrum', 'serializable': false, 'setter': function (value) { this.setPropStoreFieldValue('parent', value); var sections = this.getSections(); if (sections) sections.setParent(this.getParent() || this); } }); this.defineProp('sections', { 'dataType': 
'Kekule.ChemObjList', 'setter': function (value) { var old = this.getSections(); if (old !== value) { if (old) { old.finalize(); } if (value) { value._transparent = true; // force the obj list be transparent value.setParent(this.getParent() || this); value.setOwner(this.getOwner()); } this.setPropStoreFieldValue('sections', value); } } }); this.defineProp('autoCreateSection', {'dataType': DataType.BOOL}); this.defineProp('activeSectionIndex', { 'dataType': DataType.INT, 'getter': function () { if (this.getSectionCount() <= 0) return -1; else if (this.getSectionCount() === 1) // only one section, it should be activated by default return 0; else return this.getPropStoreFieldValue('activeSectionIndex'); }, 'setter': function (value) { if (value >= 0 && value <= this.getSectionCount()) this.setPropStoreFieldValue('activeSectionIndex', value); } }); this.defineProp('activeSection', { 'dataType': 'Kekule.Spectroscopy.SpectrumDataSection', 'serializable': false, 'getter': function () { var result = this.getSectionAt(this.getActiveSectionIndex() || 0); if (!result && this.getSectionCount() <= 0 && this.getAutoCreateSection()) { result = this.createSection(this.getVariables()); //console.log('auto create'); } return result; }, 'setter': function (value) { this.setActiveSectionIndex(this.indexOfSection(value)); } }); /* this.defineProp('variables', {'dataType': DataType.ARRAY, 'setter': null, 'serializable': false, 'getter': function() { var result = []; for (var i = 0, l = this.getSectionCount(); i < l; ++i) { var vars = this.getSectionAt(i).getVariables(); AU.pushUnique(result, vars); } return result; } }); */ this.defineProp('variables', {'dataType': DataType.ARRAY/*, 'setter': null*/}); // private, stores the data items, each item is a hash, e.g. 
{x: 1, y: 10, w: 2} //this.defineProp('dataItems', {'dataType': DataType.ARRAY, 'setter': null, 'scope': PS.PRIVATE}); // private, cache all variable names this.defineProp('varSymbols', { 'dataType': DataType.ARRAY, 'setter': null, 'scope': PS.PRIVATE, 'getter': function () { var result = []; var list = this.getVariables() || []; for (var j = 0, jj = list.length; j < jj; ++j) { var varDef = list[j]; result.push(varDef.getSymbol()); } return result; } }); this.defineProp('mode', {'dataType': DataType.INT, 'enumSource': Kekule.Spectroscopy.DataMode}); }, /** @ignore */ initPropValues: function () { this.tryApplySuper('initPropValues'); this.setAutoCreateSection(true); this.setMode(Kekule.Spectroscopy.DataMode.CONTINUOUS); }, /** @private */ _getDefaultOwner: function() { var parent = this.getParent(); return parent && parent.getOwner && parent.getOwner(); // always returns the owner of parent spectrum }, /** @private */ getHigherLevelObj: function () { return this.getParent(); }, /** @ignore */ getChildHolder: function () { return this.getSections(); }, /** @ignore */ loaded: function(/*$super*/) { var sections = this.getSections(); if (sections) { sections.parentChanged(this); sections.ownerChanged(this.getOwner()); } this.tryApplySuper('loaded'); }, /** * Create and append a new {@link Kekule.Spectroscopy.SpectrumDataSection}. * @param {Array} variables Array of local variable symbol or definition used by secion. * @param {Int} mode * @returns {Kekule.Spectroscopy.SpectrumDataSection} */ createSection: function (variables, mode) { var result = new Kekule.Spectroscopy.SpectrumDataSection(null, this, variables); //result.setVariables(variables); result.setMode(mode || this.getMode()); this.getSections().appendChild(result); return result; }, /** * Remove all data sections. 
*/ clearSection: function () { var sections = this.getChildren(); for (var i = 0, l = sections.length; i < l; ++i) { sections[i].clear(); sections[i].setParent(null); sections[i].finalize(); } this.getSections().clear(); }, /** * Get count of child data sections. * @returns {Int} */ getSectionCount: function () { return this.getSections().getChildCount(); }, /** * Get child data sectionb at index. * @param {Int} index * @returns {Kekule.Spectroscopy.SpectrumDataSection} */ getSectionAt: function (index) { return this.getSections().getItemAt(index); }, /** * Get the index of child section in children list. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @returns {Int} Index of section or -1 when not found. */ indexOfSection: function (section) { return this.getSections().indexOfItem(section); }, /** * Check if section is in this spectrum data. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @returns {Bool} */ hasSection: function (section) { return this.indexOfSection(section) >= 0; }, /** * Remove a data section at index. * @param {Int} index * @returns {Kekule.Spectroscopy.SpectrumDataSection} Child section removed. */ removeSectionAt: function (index) { return this.getSections().removeItemAt(index); }, /** * Remove a child data section. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @returns {Kekule.Spectroscopy.SpectrumDataSection} Section object removed. */ removeSection: function (section) { return this.getSections().removeItem(section); }, /** * Insert a new section to index. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @param {Int} index * @return {Int} Index of section after insertion. */ insertSectionAt: function (section, index) { return this.getSections().insertItemAt(section, index); }, /** * Insert a data section before refSection in data section list. 
* @param {Kekule.Spectroscopy.SpectrumDataSection} obj * @param {Kekule.Spectroscopy.SpectrumDataSection} refChildr * @return {Int} Index of section after insertion. */ insertSectionBefore: function (section, refSection) { return this.getSections().insertItemBefore(section, refSection); }, /** * Add new data section to the tail of section list. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @return {Int} Index of obj after appending. */ appendSection: function (section) { return this.getSections().appendChild(section); }, /** * Returns whether multiple sections exists in this spectrum data. * @returns {Bool} */ hasMultipleSections: function () { return this.getSections().getChildCount() > 1; }, /** * Iterate all data items in a section and calculate the min/max value of each variable. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @returns {Hash} */ calcDataRangeOfSection: function (section, targetVariables) { return section.calcDataRange(targetVariables); }, /** * Iterate all data items in a set of sections and calculate the min/max value of each variable. * @param {Array} sections * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @returns {Hash} */ calcDataRangeOfSections: function (sections, targetVariables) { var result = {}; for (var i = 0, l = sections.length; i < l; ++i) { var range = sections[i].calcDataRange(targetVariables); result = Kekule.Spectroscopy.Utils.mergeDataRange(result, range); } return result; }, /** * Returns the display range of a section. * @param {Kekule.Spectroscopy.SpectrumDataSection} section * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @param {Hash} options May include fields: * { * autoCalc: Bool. 
If true, when explicit display range is not set, the number range of variable will be calculated and returned. * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} */ getDisplayRangeOfSection: function (section, targetVariables, options) { return section.getDisplayRangeOfVars(targetVariables, options); }, /** * Returns the display range of a set of sections. * @param {Array} sections * @param {Array} targetVariables Array of variable definition or symbol. * If not set, all variables will be calculated. * @param {Hash} options May include fields: * { * autoCalc: Bool. If true, when explicit display range is not set, the number range of variable will be calculated and returned. * basedOnInternalUnit: Bool. If true, the returned value will be based on internal unit rather than the external unit of variable. * } * @returns {Hash} */ getDisplayRangeOfSections: function (sections, targetVariables, options) { var result = {}; for (var i = 0, l = sections.length; i < l; ++i) { var range = sections[i].getDisplayRangeOfVars(targetVariables, options); result = Kekule.Spectroscopy.Utils.mergeDataRange(result, range); } return result; }, /** * Returns count of all variables. * @returns {Int} */ getVariableCount: function () { return (this.getVariables() || []).length; }, /** * Returns the variable definition by a index or variable name. * @param {Variant} varIndexOrNameOrDef * @returns {Kekule.Spectroscopy.SpectrumVarDefinition} */ getVariable: function (varIndexOrNameOrDef) { var varDef = (varIndexOrNameOrDef instanceof Kekule.VarDefinition) ? varIndexOrNameOrDef : (typeof (varIndexOrNameOrDef) === 'number') ? this.getVariables()[varIndexOrNameOrDef] : // index this.getVariables()[this.getVarSymbols().indexOf(varIndexOrNameOrDef)]; // name return varDef; }, /** * Returns the index of a variable definition. 
* @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef * @returns {Int} */ indexOfVariable: function (varDef) { return this.getVariables().indexOf(varDef); }, /** * Insert a new variable definition at a specified position. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef * @param {Int} index */ insertVariableAt: function (varDef, index) { if (index >= 0) this.getVariables().splice(index, 0, varDef); else this.getVariables().push(varDef); return this; }, /** * Insert a new variable definition before ref. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef * @param {Kekule.Spectroscopy.SpectrumVarDefinition} ref */ insertVariableBefore: function (varDef, ref) { var index = ref ? this.indexOfVarDefinition(ref) : -1; return this.insertVarDefinitionAt(varDef, index); }, /** * Append a new variable definition. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef */ appendVariable: function (varDef) { return this.insertVariableAt(varDef, -1); }, /** * Remove a variable definition at index. * @param {Int} index */ removeVariableAt: function (index) { this.getVariables().splice(index, 1); return this; }, /** * Remove a variable definition. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef */ removeVariable: function (varDef) { var index = this.indexOfVariable(varDef); if (index >= 0) this.removeVariableAt(index); return this; }, /** * Returns variables of certain dependency. * @param {Int} dependency Value from {@link Kekule.VarDependency} * @returns {Array} Array of var definition. */ getVariablesOfDependency: function (dependency) { var result = []; for (var i = 0, l = this.getVariableCount(); i < l; ++i) { var varDef = this.getVariable(i); if (varDef && varDef.getDependency() === dependency) result.push(varDef); } return result; }, /** * Returns the first/last value of a continuous variable. 
* @param {Variant} varNameOrIndexOrDef * @returns {Hash} Hash of {firstValue, lastValue} */ getContinuousVarRange: function (varIndexOrNameOrDef) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef && varDef.getInfo(); if (info) { if (info.continuous) { //var count = this.getDateItemCount(); return { 'fromValue': info.fromValue, 'toValue': info.toValue /*, 'interval': (info.lastValue - info.firstValue) / count */ }; } } return null; }, /** * Set the first/last value of a variable and mark it as a continuous one. * @param {Variant} varNameOrIndexOrDef * @param {Number} fromValue * @param {Number} toValue */ setContinuousVarRange: function (varIndexOrNameOrDef, fromValue, toValue) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef && varDef.getInfo(true); info.continuous = true; info.fromValue = fromValue; info.toValue = toValue; return this; }, /** * Remove the continuous information of a variable. * @param {Variant} varIndexOrNameOrDef */ clearContinuousVarRange: function (varIndexOrNameOrDef) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef.getInfo(); if (info && info.continuous) info.continuous = false; return this; }, /** * Set the default value of a variable when the concrete value in spectrum is absent. * E.g., in many NMR peak spectrums, y value will be omitted, and this method will provide a default one for it. * @param {Variant} varIndexOrNameOrDef * @param {Number} value */ setDefaultVarValue: function (varIndexOrNameOrDef, value) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef && varDef.getInfo(true); info.defaultValue = value; return this; }, /** * Clear the default value of a variable. * @param {Variant} varIndexOrNameOrDef */ clearDefaultVarValue: function(varIndexOrNameOrDef) { return this.setDefaultVarValue(varIndexOrNameOrDef, null); }, /** * Get the default value of a variable when the concrete value in spectrum is absent. 
* E.g., in many NMR peak spectrums, y value will be omitted, and this method will provide a default one for it. * @param {Variant} varIndexOrNameOrDef * @returns {Number} */ getDefaultVarValue: function(varIndexOrNameOrDef) { var varDef = this.getVariable(varIndexOrNameOrDef); var info = varDef && varDef.getInfo(); if (info) { return info.defaultValue; } }, /** * Iterate all child sections and execute function. * @param {Function} func Function with param (section, index). */ iterateSections: function(func) { for (var i = 0, l = this.getSectionCount(); i < l; ++i) { func(this.getSectionAt(i), i); } }, /** * Sort all data items. * @param {Func} func Optional, func(hash1, hash2). If not set, data items will be sorted by default method. */ sort: function(func) { this.iterateSections(function(c){ c.sort(func); }); }, /** * Returns the count of data items. * @returns {Int} */ getDataCount: function() { var result = 0; this.iterateSections(function(c){ result += c.getDataCount(); }); return result; }, /** * Clear all data items in all data sections. */ clearData: function() { this.iterateSections(function(c){ c.clear(); }); }, /** * Add new data item to active data section. The item is can be a hash or an array. * If it is a hash, the hash fields must matches {@link Kekule.Spectroscopy.SpectrumData.independentVars} and {@link Kekule.Spectroscopy.SpectrumData.dependentVars}. * If it is an array, the values in array will automatically mapped to independent and dependent vars. * @param {Variant} item */ appendData: function(item) { return this.getActiveSection().appendData(item); }, /** * Remove a data item from active data section. * @param {Array} item */ removeData: function(item) { return this.getActiveSection().removeData(item); }, /** * Remove a data item at index in current active section. * @param {Int} index */ removeDataAt: function(index) { return this.getActiveSection().removeDataAt(index); }, /** * Get the data value at index in current active section. 
* @param {Int} index * @returns {Array} The arrayed form of value. */ getRawValueAt: function(index) { return this.getActiveSection().getRawValueAt(index); }, /** @private */ getHashValueAt: function(index) { return this.getActiveSection().getHashValueAt(index); }, /** * Get the data value at index in current active section. * @param {Int} index * @returns {Hash} The hashed form of value. */ getValueAt: function(index) { return this.getHashValueAt(index); }, /** * Set the data value at index in current active section. * @param {Int} index * @param {Array} The array form of value. */ setRawValueAt: function(index, value) { this.getActiveSection().setRawValueAt(index, value); return this; }, /** @private */ setHashValueAt: function(index, value, options) { this.getActiveSection().setHashValueAt(index, value, options); return this; }, /** * Set the data value at index in current active section. * @param {Int} index * @param {Variant} value Value in hash or array form. */ setValueAt: function(index, value, options) { this.getActiveSection().setValueAt(index, value); return this; }, /** * Get the extra information of a data value in current active section. * @param {Variant} value Data value in hash or array form. * @returns {Hash} */ getExtraInfoOf: function(value) { return this.getActiveSection().getExtraInfoOf(value); }, /** * Set the extra information of a data value in current active section. * @param {Variant} value Data value in hash or array form. * @param {Hash} info */ setExtraInfoOf: function(value, info) { this.getActiveSection().setExtraInfoOf(value, info); return this; }, /** * Get the extra information of data value at index of current active section. * @param {Int} index * @returns {Hash} */ getExtraInfoAt: function(index) { return this.getActiveSection().getExtraInfoAt(index); }, /** * Set the extra information of data value at index of current active section. 
* @param {Int} index * @param {Hash} info */ setExtraInfoAt: function(index, info) { this.getActiveSection().setExtraInfoAt(index, info); return this; }, /** * Calculate values of dependant variable values from independent variable values. * @param {Hash} independentValues * @param {Hash} extraOptions * @returns {Hash} */ getDependentValues: function(independentValues, extraOptions) { return this.doGetDependentValues(independantValues, extraOptions); }, /** * Do actual work of {@link Kekule.Spectroscopy.SpectrumData.getDependentValues}. * Descendants should override this method. * @param {Hash} independentValues * @param {Hash} extraOptions * @returns {Hash} * @private */ doGetDependentValues: function(independentValues, extraOptions) { // TODO: unfinished return {}; }, /** * Returns an iterator to iterate all data in this object. * If iterator is not available, null should be returned. * Otherwise, the return value should be an object with method next(): {done, value}. * @returns {Object} */ getIterator: function() { return this.doGetIterator(); }, /** * Do actual work of {@link Kekule.Spectroscopy.SpectrumData.getIterator}. * Desendants may override this method. 
* @returns {Object} * @private */ doGetIterator: function() { //var dataItems = this.getDataItems(); var sections = this.getSections().getItems(); var self = this; var result = { sectionIndex: 0, index: 0, next: function() { var self = this; var outOfRange = function() { return (self.sectionIndex >= sections.length || (self.sectionIndex === sections.length - 1 && self.index >= sections[sections.length - 1].getDataCount())); } if (outOfRange()) return {'done': true}; else { if (this.index < sections[this.sectionIndex].getDataCount()) { var ret = {'done': false, 'value': sections[this.sectionIndex].getValueAt(this.index)}; ++this.index; } else { do { ++this.sectionIndex; this.index = 0; } while(this.index >= sections[this.sectionIndex].getDataCount() || self.sectionIndex >= sections.length); if (outOfRange()) return {'done': true}; else return {'done': false, 'value': sections[this.sectionIndex].getValueAt(this.index)}; } return ret; } } }; return result; }, /** * Call function to each data item. * @param {Func} func With params: (hashValue [, index]). */ forEach: function(func, thisArg) { var iterator = this.getIterator(); if (iterator) { var index = 0; var nextItem = iterator.next(); while (!nextItem.done) { func.apply(thisArg, [nextItem.value, index]); ++index; nextItem = iterator.next(); } } return this; } }); /** * Spectrum peak shape enumeration. * @enum */ Kekule.Spectroscopy.PeakShape = { SHARP: 'sharp', BROAD: 'broad' }; /** * Spectrum peak multiplicity enumeration. * @enum */ Kekule.Spectroscopy.PeakMultiplicity = { UNKNOWN: 0, SINGLET: 1, DOUBLET: 2, TRIPLET: 3, QUARTET: 4, QUINTET: 5, SEXTUPLET: 6, MULTIPLET: 255, }; /** * A special class to store the additional peak information (e.g. the assignment ref object of peak). * @class * @augments Kekule.ChemObject * * @param {Hash} params * * @property {Kekule.ChemObject} assignment The assignment target of peak, ususally an atom or a bond. 
* @property {String} shape Shape of peak, usually be set with value from {@link Kekule.Spectroscopy.PeakShape}. * @property {VARIANT} multiplicity Multiplicity of peak. * Usually be set with value from {@link Kekule.Spectroscopy.PeakMultiplicity}, but a custom string value (e.g. 'triplet121') is also allowed. */ Kekule.Spectroscopy.SpectrumPeakDetails = Class.create(Kekule.ChemObject, /** @lends Kekule.Spectroscopy.SpectrumPeakDetails# */ { /** @private */ CLASS_NAME: 'Kekule.Spectroscopy.SpectrumPeakDetails', /** @private */ initialize: function (params) { this.tryApplySuper('initialize', []); this.setPropValues(params); }, /** @private */ initProperties: function() { this.defineProp('assignment', {'dataType': 'Kekule.ChemObject', 'objRef': true, 'autoUpdate': true}); this.defineProp('shape', {'dataType': DataType.STRING}); this.defineProp('multiplicity', {'dataType': DataType.VARIANT}); } }); /** * Enumeration of spectrum types. * @enum */ Kekule.Spectroscopy.SpectrumType = { NMR: 'NMR', IR: 'IR', MS: 'MS', UV_VIS: 'UV_VIS', IMS: 'IMS', // ION MOBILITY SPECTRUM RAMAN: 'Raman', CHROMATOGRAPHY: 'chromatography', GENERAL: 'general' // unknown type }; /** * Some constants used by NMR spectrum. * @object */ Kekule.Spectroscopy.SpectrumNMR = { TargetNucleus: { C13: 'C13', H: 'H' } }; /** * Some constants used by MS spectrum. * @object */ Kekule.Spectroscopy.SpectrumMS = { SpectrometerType: { } }; /** * The base spectrum class. Concrete spectrum classes should be inherited from this one. * @class * @augments Kekule.ChemObject * * @property {String} spectrumType Type of spectrum, value from {@link Kekule.Spectroscopy.SpectrumType}. * @property {String} name Name of spectrum. * @property {String} title Title of spectrum. * @property {Hash} metaData Meta information of spectrum. * @property {Hash} conditions Conditions of spectrum. * @property {Hash} parameters Important parameters of spectrum. * @property {Hash} annotations Additional annotations of spectrum. 
* @property {Kekule.Spectroscopy.SpectrumData} data Spectrum data. * @property {Hash} spectrumParams Key spectrum parameters, e,g. the frequency of NMR. */ Kekule.Spectroscopy.Spectrum = Class.create(Kekule.ChemObject, /** @lends Kekule.Spectroscopy.Spectrum# */ { /** @private */ CLASS_NAME: 'Kekule.Spectroscopy.Spectrum', /** @private */ initialize: function(id) { this.setPropStoreFieldValue('data', new Kekule.Spectroscopy.SpectrumData(null, null, this)); this.tryApplySuper('initialize', [id]); this._initDelegatedMethods(); }, /** @ignore */ doFinalize: function() { var d = this.getData(); if (d) d.finalize(); this.tryApplySuper('doFinalize'); }, /** @private */ initProperties: function() { this.defineProp('spectrumType', {'dataType': DataType.STRING}); this.defineProp('name', {'dataType': DataType.STRING}); //this.defineProp('title', {'dataType': DataType.STRING}); this.defineProp('data', {'dataType': 'Kekule.Spectroscopy.SpectrumData', 'setter': function(value) { var old = this.getData(); if (value !== old) { if (old) { old.finalize(); } if (value) { value.setPropValue('parent', this, true); } this.setPropStoreFieldValue('data', value); } } }); /* this.defineProp('spectrumParams', { 'dataType': DataType.HASH, 'getter': function(canCreate) { var r = this.getPropStoreFieldValue('spectrumParams'); if ((!r) && canCreate) { r = {}; this.setPropStoreFieldValue('spectrumParams', r); } return r; }, 'setter': null }); */ this._defineInfoProperty('title'); this._defineInfoProperty('metaData', null, {'dataType': DataType.HASH}); this._defineInfoProperty('conditions', null, {'dataType': DataType.HASH}); this._defineInfoProperty('parameters', null, {'dataType': DataType.HASH}); this._defineInfoProperty('annotations', null, {'dataType': DataType.HASH}); this._defineDataDelegatedProperty('variables'); this._defineDataDelegatedProperty('dataSections', 'sections'); this._defineDataDelegatedProperty('activeDataSectionIndex', 'activeSectionIndex'); 
this._defineDataDelegatedProperty('activeDataSection', 'activeSection'); }, /** @private */ _initDelegatedMethods: function() { this._defineDataDelegatedMethod('createDataSection', 'createSection'); this._defineDataDelegatedMethod('clearDataSection', 'clearSection'); this._defineDataDelegatedMethod('getDataSectionCount', 'getSectionCount'); this._defineDataDelegatedMethod('getDataSectionAt', 'getSectionAt'); this._defineDataDelegatedMethod('indexOfDataSection', 'indexOfSection'); this._defineDataDelegatedMethod('hasDataSection', 'hasSection'); this._defineDataDelegatedMethod('removeDataSectionAt', 'removeSectionAt'); this._defineDataDelegatedMethod('removeDataSection', 'removeSection'); this._defineDataDelegatedMethod('insertDataSectionAt', 'insertSectionAt'); this._defineDataDelegatedMethod('insertDataSectionBefore', 'insertSectionBefore'); this._defineDataDelegatedMethod('appendDataSection', 'appendSection'); this._defineDataDelegatedMethod('iterateDataSection', 'iterateSection'); this._defineDataDelegatedMethod('sortData', 'sort'); this._defineDataDelegatedMethod('clearData'); this._defineDataDelegatedMethod('getVariable'); this._defineDataDelegatedMethod('indexOfVariable'); this._defineDataDelegatedMethod('insertVariableAt'); this._defineDataDelegatedMethod('insertVariableBefore'); this._defineDataDelegatedMethod('appendVariable'); this._defineDataDelegatedMethod('removeVariableAt'); this._defineDataDelegatedMethod('removeVariable'); this._defineDataDelegatedMethod('getVariablesOfDependency'); this._defineDataDelegatedMethod('getContinuousVarRange'); this._defineDataDelegatedMethod('setContinuousVarRange'); this._defineDataDelegatedMethod('clearContinuousVarRange'); this._defineDataDelegatedMethod('getDefaultVarValue'); this._defineDataDelegatedMethod('setDefaultVarValue'); this._defineDataDelegatedMethod('clearDefaultVarValue'); }, /** * Defines property which storing value in {@link Kekule.ChemObject.info}. 
* @param {String} propName * @param {String} infoFieldName * @param {Hash} options * @private */ _defineInfoProperty: function(propName, infoFieldName, options) { var defs; (function() { defs = Object.extend({ 'getter': function () { return this.getInfoValue(infoFieldName || propName); }, 'setter': function(value) { this.setInfoValue(infoFieldName || propName, value); }, 'serializable': false }, options); })(); return this.defineProp(propName, defs); }, /** * Defines property which reflecting the property values in {@link Kekule.Spectroscopy.Spectrum.data}. * @param {String} propName * @param {String} dataPropName * @private */ _defineDataDelegatedProperty: function(propName, dataPropName) { if (!dataPropName) dataPropName = propName; var dataPropInfo = ClassEx.getPropInfo(Kekule.Spectroscopy.SpectrumData, dataPropName); var propOptions = Object.create(dataPropInfo); propOptions.getter = null; propOptions.setter = null; propOptions.serializable = false; if (dataPropInfo.getter) { propOptions.getter = function() { return this.getData().getPropValue(dataPropName); }; } if (dataPropInfo.setter) { propOptions.setter = function(value) { this.getData().setPropValue(dataPropName, value); } } return this.defineProp(propName, propOptions); }, /** * Defines method which directly calling the corresponding one in {@link Kekule.Spectroscopy.Spectrum.data}. 
* @param {String} methodName * @param {String} dataMethodName * @private */ _defineDataDelegatedMethod: function(methodName, dataMethodName) { if (!dataMethodName) dataMethodName = methodName; var proto = ClassEx.getPrototype(this.getClass()); proto[methodName] = function() { //console.log('call', methodName, arguments); return this.getData()[dataMethodName].apply(this.getData(), arguments); } }, /** @private */ ownerChanged: function(/*$super, */newOwner, oldOwner) { // change the owner of child data and sections var data = this.getData(); if (data) data.setOwner(newOwner); this.tryApplySuper('ownerChanged', [newOwner, oldOwner]); }, /** @private */ _getInfoBasedHashPropValue: function(infoKeyName, propName) { var hash = this.getInfoValue(infoKeyName); return hash && hash[propName]; }, /** @private */ _setInfoBasedHashpropValue: function(infoKeyName, propName, value) { var hash = this.getInfoValue(infoKeyName); if (!hash) { hash = {}; this.setInfoValue(infoKeyName, hash); } hash[propName] = value; }, /** @private */ _getAllKeysOfInfoBasedHashProp: function(infoKeyName) { var hash = this.getInfoValue(infoKeyName); return hash? Kekule.ObjUtils.getOwnedFieldNames(hash, false): []; }, /** * Returns value of spectrum meta/condition/parameter/annotation. * @param {String} key * @param {Array} candicateCategories * @returns {Variant} */ getSpectrumInfoValue: function(key, candicateCategories) { if (!candicateCategories) candicateCategories = ['metaData', 'conditions', 'parameters', 'annotations']; for (var i = 0, l = candicateCategories.length; i < l; ++i) { var c = candicateCategories[i]; var v = this._getInfoBasedHashPropValue(c, key); if (Kekule.ObjUtils.notUnset(v)) return v; } return undefined; }, /** * Returns the value of a spectrum meta data. * @param {String} key * @returns {Variant} */ getMeta: function(key) { return this._getInfoBasedHashPropValue('metaData', key); }, /** * Set the value of a spectrum meta data. 
* @param {String} key * @param {Variant} value */ setMeta: function(key, value) { this._setInfoBasedHashpropValue('metaData', key, value); return this; }, /** * Returns the value of a spectrum condition. * @param {String} key * @returns {Variant} */ getCondition: function(key) { return this._getInfoBasedHashPropValue('conditions', key); }, /** * Set the value of a spectrum condition. * @param {String} key * @param {Variant} value */ setCondition: function(key, value) { this._setInfoBasedHashpropValue('conditions', key, value); return this; }, /** * Returns all the keys of spectrum condition list. * @returns {Array} */ getConditionKeys: function() { return this._getAllKeysOfInfoBasedHashProp('conditions'); }, /** * Returns the value of a spectrum parameter. * @param {String} key * @returns {Variant} */ getParameter: function(key) { return this._getInfoBasedHashPropValue('parameters', key); }, /** * Set the value of a spectrum parameter. * @param {String} key * @param {Variant} value */ setParameter: function(key, value) { this._setInfoBasedHashpropValue('parameters', key, value); return this; }, /** * Returns all the keys of spectrum parameter list. * @returns {Array} */ getParameterKeys: function() { return this._getAllKeysOfInfoBasedHashProp('parameters'); }, /** * Returns the value of a spectrum annotation. * @param {String} key * @returns {Variant} */ getAnnotation: function(key) { return this._getInfoBasedHashPropValue('annotations', key); }, /** * Set the value of a spectrum annotation. * @param {String} key * @param {Variant} value */ setAnnotation: function(key, value) { this._setInfoBasedHashpropValue('annotations', key, value); return this; }, /** * Returns all the keys of spectrum annotation list. * @returns {Array} */ getAnnotationKeys: function() { return this._getAllKeysOfInfoBasedHashProp('annotations'); }, /* * Create the data object. 
* @param variables * @returns {Kekule.Spectroscopy.SpectrumData} */ /* createData: function(variables) { var result = new Kekule.Spectroscopy.SpectrumData(null, variables); this.setPropStoreFieldValue('data', result); return result; } */ /** * Returns the recommended external units that can be converted from internal unit for this variable. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef * @returns {Array} Array of unit objects. */ getVarAvailableExternalUnitObjs: function(varDef) { return Kekule.Spectroscopy.DataValueConverterManager.getAltUnits(varDef, varDef.getInternalUnit? varDef.getInternalUnit(): varDef.getUnit(), null, this); }, /** * Returns the recommended external units that can be converted from internal unit for this variable. * @param {Kekule.Spectroscopy.SpectrumVarDefinition} varDef * @returns {Array} Array of unit symbols (string). */ getVarAvailableExternalUnitSymbols: function(varDef) { var unitObjs = Kekule.Spectroscopy.DataValueConverterManager.getAltUnits(varDef, varDef.getInternalUnit? varDef.getInternalUnit(): varDef.getUnit(), null, this); var result = []; for (var i = 0, l = unitObjs.length; i < l; ++i) { result.push(unitObjs[i].symbol); } return result; }, /* * Returns all keys in {@link Kekule.Spectroscopy.Spectrum#spectrumParams} property. * @returns {Array} */ /* getSpectrumParamKeys: function() { return this.getSpectrumParams()? Kekule.ObjUtils.getOwnedFieldNames(this.getSpectrumParams()): []; }, */ /* * Get param value from {@link Kekule.Spectroscopy.Spectrum#spectrumParams}. * @param {String} key * @returns {Variant} */ /* getSpectrumParam: function(key) { return this.getSpectrumParams()? this.getSpectrumParams()[key]: null; }, */ /* * Set value of a spectrum param. If key already exists, its value will be overwritten. 
* @param {String} key * @param {Variant} value */ /* setSpectrumParam: function(key, value) { this.doGetSpectrumParams(true)[key] = value; this.notifyPropSet('spectrumParams', this.getPropStoreFieldValue('spectrumParams')); } */ }); Kekule.ClassDefineUtils.addStandardCoordSupport(Kekule.Spectroscopy.Spectrum); Kekule.ClassDefineUtils.addStandardSizeSupport(Kekule.Spectroscopy.Spectrum); // register spectrum related units (function(){ var register = Kekule.Unit.register; // IR register('transmittance', 'transmittance', 'OpticalTransmittance', 1); // IT/I0 register('transmittance%', 'transmittance_percent', 'OpticalTransmittance', 1e-2); // IT/I0 register('reflectance', 'reflectance', 'OpticalReflectance', 1); // IR/I0 register('absorbance', 'absorbance', 'OpticalAbsorbance', 1); // log10(IR/I0) register('Kubelka Munk', 'Kubelka_Munk', 'OpticalKubelkaMunk', 1); // (1-R^2)/(2R) // MS register('counts', 'ms_count', 'Misc', null); register('relative abundance', 'ms_relative_abundance', 'SpectrumMS', null); register('m/z', 'ms_mass_charge_ratio', 'SpectrumMS', null); })(); })();
Set owner/parent of extra info object when adding it to spectrum data
src/spectroscopy/kekule.spectrum.core.js
Set owner/parent of extra info object when adding it to spectrum data
<ide><path>rc/spectroscopy/kekule.spectrum.core.js <ide> this.setMode(Kekule.Spectroscopy.DataMode.CONTINUOUS); <ide> }, <ide> <add> /** @ignore */ <add> ownerChanged: function(newOwner, oldOwner) <add> { <add> // change the owner of all extra info objects if possible <add> for (var i = 0, l = this.getDataCount(); i < l; ++i) <add> { <add> var extra = this.getExtraInfoAt(i); <add> if (extra && extra.setOwner) <add> extra.setOwner(newOwner); <add> } <add> this.tryApplySuper('ownerChanged', [newOwner, oldOwner]); <add> }, <add> <ide> // custom save / load method <ide> /** @ignore */ <ide> doSaveProp: function(obj, prop, storageNode, serializer) <ide> */ <ide> setRawValueAt: function(index, value) <ide> { <add> var oldValue = this.getDataItems()[index]; <add> if (oldValue && oldValue._extra) <add> this._extraInfoRemoved(oldValue._extra); <ide> this.getDataItems()[index] = value; <add> if (value._extra) <add> { <add> this._extraInfoAdded(value._extra); <add> } <ide> return this; <ide> }, <ide> /** @private */ <ide> */ <ide> setExtraInfoOf: function(value, info) <ide> { <add> if (value._extra) <add> this._extraInfoRemoved(value._extra); <ide> value._extra = info; <add> this._extraInfoAdded(info); <ide> return this; <ide> }, <ide> /** <ide> setExtraInfoAt: function(index, info) <ide> { <ide> var d = this.getDataItems()[index]; <add> if (d._extra) <add> this._extraInfoRemoved(d._extra); <ide> d._extra = info; <add> this._extraInfoAdded(info); <ide> return this; <add> }, <add> /** @private */ <add> _extraInfoAdded: function(extraInfo) <add> { <add> if (extraInfo && extraInfo instanceof Kekule.ChemObject) <add> { <add> extraInfo.setParent(this); <add> extraInfo.setOwner(this.getOwner()); <add> } <add> }, <add> /** @private */ <add> _extraInfoRemoved: function(extraInfo) <add> { <add> if (extraInfo && extraInfo instanceof Kekule.ChemObject && extraInfo.getParent() === this) <add> { <add> extraInfo.setParent(null); <add> extraInfo.setOwner(null); <add> } <ide> }, <ide> <ide> 
/**
Java
apache-2.0
ceeeff4583c4490a72148d6fad3cf78136f84a77
0
asoldano/wss4j,asoldano/wss4j,clibois/wss4j,apache/wss4j,apache/wss4j,jimma/wss4j,clibois/wss4j,jimma/wss4j
/* * Copyright 2003-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ws.security.message.token; import org.apache.ws.security.WSConstants; import org.apache.ws.security.WSSecurityException; import org.apache.ws.security.util.DOM2Writer; import org.apache.ws.security.util.WSSecurityUtil; import org.apache.ws.security.util.XmlSchemaDateFormat; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.Text; import java.text.ParseException; import java.text.SimpleDateFormat; import java.text.DateFormat; import java.util.Calendar; import java.util.TimeZone; import java.util.Date; import java.util.Vector; /** * Timestamp according to SOAP Message Security 1.0, * chapter 10 / appendix A.2 * <p/> * * @author Christof Soehngen ([email protected]) */ public class Timestamp { protected Element element = null; protected Vector customElements = null; protected Calendar created; protected Calendar expires; /** * Constructs a <code>Timestamp</code> object and parses the * <code>wsu:Timestamp</code> element to initialize it. 
* * @param element the <code>wsu:Timestamp</code> element that * contains the timestamp data */ public Timestamp(Element element) throws WSSecurityException { this.element = element; customElements = new Vector(); String strCreated = null; String strExpires = null; for (Node currentChild = element.getFirstChild(); currentChild != null; currentChild = currentChild.getNextSibling()) { if (currentChild instanceof Element) { if (WSConstants.CREATED_LN.equals(currentChild.getLocalName()) && WSConstants.WSU_NS.equals(currentChild.getNamespaceURI())) { if (strCreated == null) { strCreated = ((Text) ((Element) currentChild).getFirstChild()).getData(); } else { throw new WSSecurityException(WSSecurityException.INVALID_SECURITY, "invalidTimestamp"); } } else if (WSConstants.EXPIRES_LN.equals(currentChild.getLocalName()) && WSConstants.WSU_NS.equals(currentChild.getNamespaceURI())) { if (strExpires == null) { strExpires = ((Text) ((Element) currentChild).getFirstChild()).getData(); } else { throw new WSSecurityException(WSSecurityException.INVALID_SECURITY, "invalidTimestamp"); } } else { customElements.add((Element) currentChild); } } } DateFormat zulu = new XmlSchemaDateFormat();; try { if (strCreated != null) { created = Calendar.getInstance(); created.setTime(zulu.parse(strCreated)); } if (strExpires != null) { expires = Calendar.getInstance(); expires.setTime(zulu.parse(strExpires)); } } catch (ParseException e) { throw new WSSecurityException(WSSecurityException.INVALID_SECURITY, "invalidTimestamp", null, e); } } /** * Constructs a <code>Timestamp</code> object according * to the defined parameters. 
* <p/> * * @param doc the SOAP envelope as <code>Document</code> * @param ttl the time to live (validity of the security semantics) in seconds */ public Timestamp(boolean milliseconds, Document doc, int ttl) { customElements = new Vector(); element = doc.createElementNS(WSConstants.WSU_NS, WSConstants.WSU_PREFIX + ":" + WSConstants.TIMESTAMP_TOKEN_LN); WSSecurityUtil.setNamespace(element, WSConstants.WSU_NS, WSConstants.WSU_PREFIX); DateFormat zulu = null; if (milliseconds) { zulu = new XmlSchemaDateFormat(); } else { zulu = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); zulu.setTimeZone(TimeZone.getTimeZone("UTC")); } created = getCurrentTime(); Element elementCreated = doc.createElementNS(WSConstants.WSU_NS, WSConstants.WSU_PREFIX + ":" + WSConstants.CREATED_LN); WSSecurityUtil.setNamespace(elementCreated, WSConstants.WSU_NS, WSConstants.WSU_PREFIX); elementCreated.appendChild(doc.createTextNode(zulu.format(created.getTime()))); element.appendChild(elementCreated); if (ttl != 0) { long currentTime = created.getTimeInMillis(); currentTime += ttl * 1000; expires = getCurrentTime(); expires.setTimeInMillis(currentTime); Element elementExpires = doc.createElementNS(WSConstants.WSU_NS, WSConstants.WSU_PREFIX + ":" + WSConstants.EXPIRES_LN); WSSecurityUtil.setNamespace(elementExpires, WSConstants.WSU_NS, WSConstants.WSU_PREFIX); elementExpires.appendChild(doc.createTextNode(zulu.format(expires.getTime()))); element.appendChild(elementExpires); } } /** * Get the current time * * @return calendar the current time */ protected Calendar getCurrentTime() { return Calendar.getInstance(); } /** * Returns the dom element of this <code>Timestamp</code> object. * * @return the <code>wsse:UsernameToken</code> element */ public Element getElement() { return this.element; } /** * Returns the string representation of the token. 
* * @return a XML string representation */ public String toString() { return DOM2Writer.nodeToString((Node) this.element); } /** * Get the time of creation. * <p/> * * @return the "created" time */ public Calendar getCreated() { return created; } /** * Get the time of expiration. * <p/> * * @return the "expires" time */ public Calendar getExpires() { return expires; } /** * Creates and adds a custom element to this Timestamp */ public void addCustomElement(Document doc, Element customElement) { customElements.add(customElement); element.appendChild(customElement); } /** * Get the the custom elements from this Timestamp * * @return the vector containing the custom elements. */ public Vector getCustomElements() { return this.customElements; } /** * Set wsu:Id attribute of this timestamp * @param id */ public void setID(String id) { String prefix = WSSecurityUtil.setNamespace(this.element, WSConstants.WSU_NS, WSConstants.WSU_PREFIX); this.element.setAttributeNS(WSConstants.WSU_NS, prefix + ":Id", id); } /** * Returns the value of the wsu:Id attribute * @return TODO */ public String getID() { return this.element.getAttributeNS(WSConstants.WSU_NS, "Id"); } }
src/org/apache/ws/security/message/token/Timestamp.java
/* * Copyright 2003-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ws.security.message.token; import org.apache.ws.security.WSConstants; import org.apache.ws.security.WSSecurityException; import org.apache.ws.security.util.DOM2Writer; import org.apache.ws.security.util.WSSecurityUtil; import org.apache.ws.security.util.XmlSchemaDateFormat; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.Text; import java.text.ParseException; import java.text.SimpleDateFormat; import java.text.DateFormat; import java.util.Calendar; import java.util.TimeZone; import java.util.Date; import java.util.Vector; /** * Timestamp according to SOAP Message Security 1.0, * chapter 10 / appendix A.2 * <p/> * * @author Christof Soehngen ([email protected]) */ public class Timestamp { protected Element element = null; protected Element elementCreated = null; protected Element elementExpires = null; protected Vector customElements = null; protected Calendar created; protected Calendar expires; /** * Constructs a <code>Timestamp</code> object and parses the * <code>wsu:Timestamp</code> element to initialize it. 
* * @param element the <code>wsu:Timestamp</code> element that * contains the timestamp data */ public Timestamp(Element element) throws WSSecurityException { this.element = element; customElements = new Vector(); String strCreated = null; String strExpires = null; for (Node currentChild = element.getFirstChild(); currentChild != null; currentChild = currentChild.getNextSibling()) { if (currentChild instanceof Element) { if (WSConstants.CREATED_LN.equals(currentChild.getLocalName()) && WSConstants.WSU_NS.equals(currentChild.getNamespaceURI())) { if (strCreated == null) { strCreated = ((Text) ((Element) currentChild).getFirstChild()).getData(); } else { throw new WSSecurityException(WSSecurityException.INVALID_SECURITY, "invalidTimestamp"); } } else if (WSConstants.EXPIRES_LN.equals(currentChild.getLocalName()) && WSConstants.WSU_NS.equals(currentChild.getNamespaceURI())) { if (strExpires == null) { strExpires = ((Text) ((Element) currentChild).getFirstChild()).getData(); } else { throw new WSSecurityException(WSSecurityException.INVALID_SECURITY, "invalidTimestamp"); } } else { customElements.add((Element) currentChild); } } } DateFormat zulu = new XmlSchemaDateFormat();; try { if (strCreated != null) { created = Calendar.getInstance(); created.setTime(zulu.parse(strCreated)); } if (strExpires != null) { expires = Calendar.getInstance(); expires.setTime(zulu.parse(strExpires)); } } catch (ParseException e) { throw new WSSecurityException(WSSecurityException.INVALID_SECURITY, "invalidTimestamp", null, e); } } /** * Constructs a <code>Timestamp</code> object according * to the defined parameters. 
* <p/> * * @param doc the SOAP envelope as <code>Document</code> * @param ttl the time to live (validity of the security semantics) in seconds */ public Timestamp(boolean milliseconds, Document doc, int ttl) { customElements = new Vector(); element = doc.createElementNS(WSConstants.WSU_NS, WSConstants.WSU_PREFIX + ":" + WSConstants.TIMESTAMP_TOKEN_LN); WSSecurityUtil.setNamespace(element, WSConstants.WSU_NS, WSConstants.WSU_PREFIX); DateFormat zulu = null; if (milliseconds) { zulu = new XmlSchemaDateFormat(); } else { zulu = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); zulu.setTimeZone(TimeZone.getTimeZone("UTC")); } Calendar rightNow = Calendar.getInstance(); elementCreated = doc.createElementNS(WSConstants.WSU_NS, WSConstants.WSU_PREFIX + ":" + WSConstants.CREATED_LN); WSSecurityUtil.setNamespace(elementCreated, WSConstants.WSU_NS, WSConstants.WSU_PREFIX); elementCreated.appendChild(doc.createTextNode(zulu.format(rightNow.getTime()))); element.appendChild(elementCreated); if (ttl != 0) { long currentTime = rightNow.getTime().getTime(); currentTime += ttl * 1000; rightNow.setTime(new Date(currentTime)); elementExpires = doc.createElementNS(WSConstants.WSU_NS, WSConstants.WSU_PREFIX + ":" + WSConstants.EXPIRES_LN); WSSecurityUtil.setNamespace(elementExpires, WSConstants.WSU_NS, WSConstants.WSU_PREFIX); elementExpires.appendChild(doc.createTextNode(zulu.format(rightNow.getTime()))); element.appendChild(elementExpires); } } /** * Returns the dom element of this <code>Timestamp</code> object. * * @return the <code>wsse:UsernameToken</code> element */ public Element getElement() { return this.element; } /** * Returns the string representation of the token. * * @return a XML string representation */ public String toString() { return DOM2Writer.nodeToString((Node) this.element); } /** * Get the time of creation. * <p/> * * @return the "created" time */ public Calendar getCreated() { return created; } /** * Get the time of expiration. 
* <p/> * * @return the "expires" time */ public Calendar getExpires() { return expires; } /** * Creates and adds a custom element to this Timestamp */ public void addCustomElement(Document doc, Element customElement) { customElements.add(customElement); element.appendChild(customElement); } /** * Get the the custom elements from this Timestamp * * @return the vector containing the custom elements. */ public Vector getCustomElements() { return this.customElements; } /** * Set wsu:Id attribute of this timestamp * @param id */ public void setID(String id) { String prefix = WSSecurityUtil.setNamespace(this.element, WSConstants.WSU_NS, WSConstants.WSU_PREFIX); this.element.setAttributeNS(WSConstants.WSU_NS, prefix + ":Id", id); } /** * Returns the value of the wsu:Id attribute * @return TODO */ public String getID() { return this.element.getAttributeNS(WSConstants.WSU_NS, "Id"); } }
Apply patch from Christian to have a consistent behaviour when access created, expires fields. git-svn-id: 10bc45916fe30ae642aa5037c9a4b05727bba413@369830 13f79535-47bb-0310-9956-ffa450edef68
src/org/apache/ws/security/message/token/Timestamp.java
Apply patch from Christian to have a consistent behaviour when access created, expires fields.
<ide><path>rc/org/apache/ws/security/message/token/Timestamp.java <ide> public class Timestamp { <ide> <ide> protected Element element = null; <del> protected Element elementCreated = null; <del> protected Element elementExpires = null; <ide> protected Vector customElements = null; <ide> <ide> protected Calendar created; <ide> } <ide> } <ide> <add> <ide> /** <ide> * Constructs a <code>Timestamp</code> object according <ide> * to the defined parameters. <ide> zulu = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); <ide> zulu.setTimeZone(TimeZone.getTimeZone("UTC")); <ide> } <del> Calendar rightNow = Calendar.getInstance(); <del> <del> elementCreated = <add> created = getCurrentTime(); <add> <add> Element elementCreated = <ide> doc.createElementNS(WSConstants.WSU_NS, <ide> WSConstants.WSU_PREFIX + ":" + WSConstants.CREATED_LN); <ide> WSSecurityUtil.setNamespace(elementCreated, <ide> WSConstants.WSU_NS, <ide> WSConstants.WSU_PREFIX); <del> elementCreated.appendChild(doc.createTextNode(zulu.format(rightNow.getTime()))); <add> elementCreated.appendChild(doc.createTextNode(zulu.format(created.getTime()))); <ide> element.appendChild(elementCreated); <ide> if (ttl != 0) { <del> long currentTime = rightNow.getTime().getTime(); <add> long currentTime = created.getTimeInMillis(); <ide> currentTime += ttl * 1000; <del> rightNow.setTime(new Date(currentTime)); <del> <del> elementExpires = <add> expires = getCurrentTime(); <add> expires.setTimeInMillis(currentTime); <add> <add> Element elementExpires = <ide> doc.createElementNS(WSConstants.WSU_NS, <ide> WSConstants.WSU_PREFIX + ":" + WSConstants.EXPIRES_LN); <ide> WSSecurityUtil.setNamespace(elementExpires, <ide> WSConstants.WSU_NS, <ide> WSConstants.WSU_PREFIX); <del> elementExpires.appendChild(doc.createTextNode(zulu.format(rightNow.getTime()))); <add> elementExpires.appendChild(doc.createTextNode(zulu.format(expires.getTime()))); <ide> element.appendChild(elementExpires); <ide> } <ide> } <ide> <add> /** <add> * Get the 
current time <add> * <add> * @return calendar the current time <add> */ <add> protected Calendar getCurrentTime() { <add> return Calendar.getInstance(); <add> } <add> <ide> /** <ide> * Returns the dom element of this <code>Timestamp</code> object. <ide> *
Java
bsd-3-clause
90ef85b4b02a2aaa14f100a22bbb4f296d0c1da4
0
all-of-us/workbench,all-of-us/workbench,all-of-us/workbench,all-of-us/workbench,all-of-us/workbench,all-of-us/workbench,all-of-us/workbench,all-of-us/workbench,all-of-us/workbench
package org.pmiops.workbench.cohortbuilder; import static org.pmiops.workbench.cohortbuilder.util.Validation.from; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.betweenOperator; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.notBetweenAndNotInOperator; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.notZeroAndNotOne; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsEmpty; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsNotDates; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsNotNumbers; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsNotOne; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsNotTwo; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operatorNull; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.temporalGroupNull; import com.google.api.client.util.Sets; import com.google.cloud.bigquery.QueryParameterValue; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ListMultimap; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.pmiops.workbench.exceptions.BadRequestException; import org.pmiops.workbench.model.AttrName; import org.pmiops.workbench.model.Attribute; import org.pmiops.workbench.model.CriteriaType; import org.pmiops.workbench.model.Domain; import org.pmiops.workbench.model.Modifier; import org.pmiops.workbench.model.ModifierType; import org.pmiops.workbench.model.Operator; import org.pmiops.workbench.model.SearchGroup; import org.pmiops.workbench.model.SearchGroupItem; import 
org.pmiops.workbench.model.SearchParameter; import org.pmiops.workbench.model.TemporalMention; import org.pmiops.workbench.model.TemporalTime; import org.pmiops.workbench.utils.OperatorUtils; /** SearchGroupItemQueryBuilder builds BigQuery queries for search group items. */ public final class SearchGroupItemQueryBuilder { private static final int STANDARD = 1; private static final int SOURCE = 0; private static final ImmutableMap<AttrName, String> AGE_COLUMN_SQL_MAP = ImmutableMap.of( AttrName.AGE, "DATE_DIFF(CURRENT_DATE,dob, YEAR) - IF(EXTRACT(MONTH FROM dob)*100 + EXTRACT(DAY FROM dob) > EXTRACT(MONTH FROM CURRENT_DATE)*100 + EXTRACT(DAY FROM CURRENT_DATE),1,0)", AttrName.AGE_AT_CONSENT, "age_at_consent", AttrName.AGE_AT_CDR, "age_at_cdr"); private static final ImmutableMap<CriteriaType, String> DEMO_COLUMN_SQL_MAP = ImmutableMap.of( CriteriaType.RACE, "race_concept_id", CriteriaType.GENDER, "gender_concept_id", CriteriaType.SEX, "sex_at_birth_concept_id", CriteriaType.ETHNICITY, "ethnicity_concept_id"); private static final ImmutableList<Domain> SOURCE_STANDARD_DOMAINS = ImmutableList.of(Domain.CONDITION, Domain.PROCEDURE); // sql parts to help construct BigQuery sql statements private static final String OR = " or "; private static final String AND = " and "; private static final String UNION_TEMPLATE = "union all\n"; private static final String DESC = " desc"; private static final String BASE_SQL = "select distinct person_id, entry_date, concept_id\n" + "from `${projectId}.${dataSetId}.cb_search_all_events`\n" + "where "; private static final String STANDARD_SQL = "is_standard = %s"; private static final String CONCEPT_ID_UNNEST_SQL = "concept_id in unnest(%s)"; private static final String CONCEPT_ID_IN_SQL = "concept_id in"; private static final String STANDARD_OR_SOURCE_SQL = STANDARD_SQL + AND + CONCEPT_ID_UNNEST_SQL + "\n"; public static final String CHILD_LOOKUP_SQL = " (select distinct c.concept_id\n" + "from `${projectId}.${dataSetId}.cb_criteria` c\n" 
+ "join (select cast(cr.id as string) as id\n" + "from `${projectId}.${dataSetId}.cb_criteria` cr\n" + "where domain_id = %s\n" + "and is_standard = %s\n" + "and concept_id in unnest(%s)\n" + "and is_selectable = 1\n" + "and full_text like %s) a\n" + "on (c.path like concat('%%.', a.id, '.%%') or c.path like concat('%%.', a.id) or c.path like concat(a.id, '.%%'))\n" + "where domain_id = %s\n" + "and is_standard = %s\n" + "and is_selectable = 1)"; public static final String DRUG_CHILD_LOOKUP_SQL = " (select distinct ca.descendant_id\n" + "from `${projectId}.${dataSetId}.cb_criteria_ancestor` ca\n" + "join (select distinct c.concept_id\n" + "from `${projectId}.${dataSetId}.cb_criteria` c\n" + "join (select cast(cr.id as string) as id\n" + "from `${projectId}.${dataSetId}.cb_criteria` cr\n" + "where domain_id = %s\n" + "and is_standard = %s\n" + "and concept_id in unnest(%s)\n" + "and is_selectable = 1\n" + "and full_text like %s) a\n" + "on (c.path like concat('%%.', a.id, '.%%') or c.path like concat('%%.', a.id))\n" + "where domain_id = %s\n" + "and is_standard = %s\n" + "and is_selectable = 1) b on (ca.ancestor_id = b.concept_id))"; private static final String PARENT_STANDARD_OR_SOURCE_SQL = STANDARD_SQL + AND + CONCEPT_ID_IN_SQL + CHILD_LOOKUP_SQL; private static final String DRUG_SQL = STANDARD_SQL + AND + CONCEPT_ID_IN_SQL + DRUG_CHILD_LOOKUP_SQL; private static final String VALUE_AS_NUMBER = " value_as_number %s %s"; private static final String VALUE_AS_NUMBER_IS_NOT_NULL = " and value_as_number is not null"; private static final String VALUE_AS_CONCEPT_ID = " value_as_concept_id %s unnest(%s)"; private static final String VALUE_SOURCE_CONCEPT_ID = " value_source_concept_id %s unnest(%s)"; private static final String SOURCE_CONCEPT_SURVEY_ID = " and survey_version_concept_id %s unnest(%s)"; private static final String SYSTOLIC_SQL = " and systolic %s %s"; private static final String DIASTOLIC_SQL = " and diastolic %s %s"; // sql parts to help construct 
Temporal BigQuery sql private static final String SAME_ENC = "temp1.person_id = temp2.person_id and temp1.visit_occurrence_id = temp2.visit_occurrence_id\n"; private static final String X_DAYS_BEFORE = "temp1.person_id = temp2.person_id and temp1.entry_date <= DATE_SUB(temp2.entry_date, INTERVAL %s DAY)\n"; private static final String X_DAYS_AFTER = "temp1.person_id = temp2.person_id and temp1." + "entry_date >= DATE_ADD(temp2.entry_date, INTERVAL %s DAY)\n"; private static final String WITHIN_X_DAYS_OF = "temp1.person_id = temp2.person_id and temp1.entry_date between " + "DATE_SUB(temp2.entry_date, INTERVAL %s DAY) and DATE_ADD(temp2.entry_date, INTERVAL %s DAY)\n"; private static final String TEMPORAL_EXIST = "select temp1.person_id\n" + "from (%s) temp1\n" + "where exists (select 1\n" + "from (%s) temp2\n" + "where (%s))\n"; private static final String TEMPORAL_JOIN = "select temp1.person_id\n" + "from (%s) temp1\n" + "join (select person_id, visit_occurrence_id, entry_date\n" + "from (%s)\n" + ") temp2 on (%s)\n"; private static final String TEMPORAL_SQL = "select person_id, visit_occurrence_id, entry_date%s\n" + "from `${projectId}.${dataSetId}.cb_search_all_events`\n" + "where %s\n" + "and person_id in (%s)\n"; private static final String RANK_1_SQL = ", rank() over (partition by person_id order by entry_date%s) rn"; private static final String TEMPORAL_RANK_1_SQL = "select person_id, visit_occurrence_id, entry_date\n" + "from (%s) a\n" + "where rn = 1\n"; // sql parts to help construct Modifiers BigQuery sql private static final String MODIFIER_SQL_TEMPLATE = "select criteria.person_id from (%s) criteria\n"; private static final String OCCURRENCES_SQL_TEMPLATE = "group by criteria.person_id, criteria.concept_id\n" + "having count(criteria.person_id) "; private static final String AGE_AT_EVENT_SQL_TEMPLATE = " and age_at_event "; private static final String EVENT_DATE_SQL_TEMPLATE = " and entry_date "; private static final String ENCOUNTERS_SQL_TEMPLATE = " 
and visit_concept_id "; // sql parts to help construct demographic BigQuery sql private static final String DEC_SQL = "exists (\n" + "SELECT 'x' FROM `${projectId}.${dataSetId}.death` d\n" + "where d.person_id = p.person_id)\n"; private static final String DEMO_BASE = "select person_id\n" + "from `${projectId}.${dataSetId}.person` p\nwhere\n"; private static final String AGE_SQL = "select person_id\n" + "from `${projectId}.${dataSetId}.cb_search_person` p\nwhere %s %s %s\n"; private static final String AGE_DEC_SQL = "and not " + DEC_SQL; private static final String DEMO_IN_SQL = "%s in unnest(%s)\n"; private static final String HAS_FITBIT_SQL = "select person_id\n" + "from `${projectId}.${dataSetId}.cb_search_person` p\nwhere has_fitbit = 1\n"; private static final String HAS_PM_DATA_SQL = "select person_id\n" + "from `${projectId}.${dataSetId}.cb_search_person` p\nwhere has_physical_measurement_data = 1\n"; private static final String WHOLE_GENOME_VARIANT_SQL = "select person_id\n" + "from `${projectId}.${dataSetId}.cb_search_person` p\nwhere has_whole_genome_variant = 1\n"; private static final String CB_SEARCH_ALL_EVENTS_WHERE = "select person_id from `${projectId}.${dataSetId}.cb_search_all_events`\nwhere "; private static final String PERSON_ID_IN = "person_id in ("; /** Build the inner most sql using search parameters, modifiers and attributes. 
*/ public static void buildQuery( Map<String, QueryParameterValue> queryParams, List<String> queryParts, SearchGroup searchGroup) { if (searchGroup.getTemporal()) { // build the outer temporal sql statement String query = buildOuterTemporalQuery(queryParams, searchGroup); queryParts.add(query); } else { for (SearchGroupItem searchGroupItem : searchGroup.getItems()) { // build regular sql statement String query = buildBaseQuery(queryParams, searchGroupItem, searchGroup.getMention()); queryParts.add(query); } } } /** Build the inner most sql */ private static String buildBaseQuery( Map<String, QueryParameterValue> queryParams, SearchGroupItem searchGroupItem, TemporalMention mention) { Set<SearchParameter> standardSearchParameters = new HashSet<>(); Set<SearchParameter> sourceSearchParameters = new HashSet<>(); List<String> queryParts = new ArrayList<>(); Domain domain = Domain.fromValue(searchGroupItem.getType()); // When building sql for demographics - we query against the person table if (Domain.PERSON.equals(domain)) { return buildDemoSql(queryParams, searchGroupItem); } if (Domain.FITBIT.equals(domain)) { return HAS_FITBIT_SQL; } if (Domain.WHOLE_GENOME_VARIANT.equals(domain)) { return WHOLE_GENOME_VARIANT_SQL; } if (hasPhysicalMeasurementData(searchGroupItem)) { return HAS_PM_DATA_SQL; } // Otherwise build sql against flat denormalized search table for (SearchParameter param : searchGroupItem.getSearchParameters()) { if (param.getAttributes().isEmpty()) { if (param.getStandard()) { standardSearchParameters.add(param); } else { sourceSearchParameters.add(param); } } else { queryParts.add(processAttributeSql(queryParams, param)); } } addParamValueAndFormat( domain.toString(), queryParams, standardSearchParameters, queryParts, STANDARD); addParamValueAndFormat( domain.toString(), queryParams, sourceSearchParameters, queryParts, SOURCE); String queryPartsSql; if (SOURCE_STANDARD_DOMAINS.contains(domain) && !sourceSearchParameters.isEmpty() && 
!standardSearchParameters.isEmpty()) { queryPartsSql = PERSON_ID_IN + CB_SEARCH_ALL_EVENTS_WHERE + String.join(UNION_TEMPLATE + CB_SEARCH_ALL_EVENTS_WHERE, queryParts) + ")"; } else { queryPartsSql = "(" + String.join(OR + "\n", queryParts) + ")"; } // format the base sql with all query parts String baseSql = BASE_SQL + queryPartsSql; // build modifier sql if modifiers exists String modifiedSql = buildModifierSql(baseSql, queryParams, searchGroupItem.getModifiers()); // build the inner temporal sql if this search group item is temporal // otherwise return modifiedSql return buildInnerTemporalQuery( modifiedSql, queryPartsSql, queryParams, searchGroupItem.getModifiers(), mention); } /** Build sql statement for demographics */ private static String buildDemoSql( Map<String, QueryParameterValue> queryParams, SearchGroupItem searchGroupItem) { List<SearchParameter> parameters = searchGroupItem.getSearchParameters(); SearchParameter param = parameters.get(0); switch (CriteriaType.valueOf(param.getType())) { case AGE: List<String> queryParts = new ArrayList<>(); parameters.forEach( searchParameter -> { Attribute attribute = searchParameter.getAttributes().get(0); String ageNamedParameter = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(new Long(attribute.getOperands().get(0)))); if (attribute.getOperands().size() > 1) { String ageNamedParameter1 = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(new Long(attribute.getOperands().get(1)))); ageNamedParameter = ageNamedParameter + AND + ageNamedParameter1; } String ageSql = String.format( AGE_SQL, AGE_COLUMN_SQL_MAP.get(attribute.getName()), OperatorUtils.getSqlOperator(attribute.getOperator()), ageNamedParameter); queryParts.add( AttrName.AGE_AT_CONSENT.equals(attribute.getName()) ? 
ageSql : ageSql + AGE_DEC_SQL); }); return String.join(UNION_TEMPLATE, queryParts); case GENDER: case SEX: case ETHNICITY: case RACE: // Gender, Sex, Ethnicity and Race all share the same implementation Long[] conceptIds = searchGroupItem.getSearchParameters().stream() .map(SearchParameter::getConceptId) .toArray(Long[]::new); String namedParameter = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array(conceptIds, Long.class)); CriteriaType criteriaType = CriteriaType.fromValue(param.getType()); return DEMO_BASE + String.format(DEMO_IN_SQL, DEMO_COLUMN_SQL_MAP.get(criteriaType), namedParameter); case DECEASED: return DEMO_BASE + DEC_SQL; default: throw new BadRequestException( "Search unsupported for demographics type " + param.getType()); } } /** * Implementation of temporal CB queries. Please reference the following google doc for details: * https://docs.google.com/document/d/1OFrG7htm8gT0QOOvzHa7l3C3Qs0JnoENuK1TDAB_1A8 */ private static String buildInnerTemporalQuery( String modifiedSql, String conditionsSql, Map<String, QueryParameterValue> queryParams, List<Modifier> modifiers, TemporalMention mention) { if (mention == null) { return modifiedSql; } // if modifiers exists we need to add them again to the inner temporal sql conditionsSql = conditionsSql + getAgeDateAndEncounterSql(queryParams, modifiers); if (TemporalMention.ANY_MENTION.equals(mention)) { return String.format(TEMPORAL_SQL, "", conditionsSql, modifiedSql); } else if (TemporalMention.FIRST_MENTION.equals(mention)) { String rank1Sql = String.format(RANK_1_SQL, ""); String temporalSql = String.format(TEMPORAL_SQL, rank1Sql, conditionsSql, modifiedSql); return String.format(TEMPORAL_RANK_1_SQL, temporalSql); } String rank1Sql = String.format(RANK_1_SQL, DESC); String temporalSql = String.format(TEMPORAL_SQL, rank1Sql, conditionsSql, modifiedSql); return String.format(TEMPORAL_RANK_1_SQL, temporalSql); } /** * The temporal group functionality description is here: * 
https://docs.google.com/document/d/1OFrG7htm8gT0QOOvzHa7l3C3Qs0JnoENuK1TDAB_1A8 */ private static String buildOuterTemporalQuery( Map<String, QueryParameterValue> params, SearchGroup searchGroup) { List<String> temporalQueryParts1 = new ArrayList<>(); List<String> temporalQueryParts2 = new ArrayList<>(); ListMultimap<Integer, SearchGroupItem> temporalGroups = getTemporalGroups(searchGroup); for (Integer key : temporalGroups.keySet()) { List<SearchGroupItem> tempGroups = temporalGroups.get(key); // key of zero indicates belonging to the first temporal group // key of one indicates belonging to the second temporal group boolean isFirstGroup = key == 0; for (SearchGroupItem tempGroup : tempGroups) { String query = buildBaseQuery(params, tempGroup, searchGroup.getMention()); if (isFirstGroup) { temporalQueryParts1.add(query); } else { temporalQueryParts2.add(query); } } } String conditions = SAME_ENC; if (TemporalTime.WITHIN_X_DAYS_OF.equals(searchGroup.getTime())) { String parameterName = QueryParameterUtil.addQueryParameterValue( params, QueryParameterValue.int64(searchGroup.getTimeValue())); conditions = String.format(WITHIN_X_DAYS_OF, parameterName, parameterName); } else if (TemporalTime.X_DAYS_BEFORE.equals(searchGroup.getTime())) { String parameterName = QueryParameterUtil.addQueryParameterValue( params, QueryParameterValue.int64(searchGroup.getTimeValue())); conditions = String.format(X_DAYS_BEFORE, parameterName); } else if (TemporalTime.X_DAYS_AFTER.equals(searchGroup.getTime())) { String parameterName = QueryParameterUtil.addQueryParameterValue( params, QueryParameterValue.int64(searchGroup.getTimeValue())); conditions = String.format(X_DAYS_AFTER, parameterName); } return String.format( temporalQueryParts2.size() == 1 ? TEMPORAL_EXIST : TEMPORAL_JOIN, String.join(UNION_TEMPLATE, temporalQueryParts1), String.join(UNION_TEMPLATE, temporalQueryParts2), conditions); } /** * Helper method to collect search groups into 2 temporal groups. 
Key of zero indicates belonging * to the first temporal group. Key of one indicates belonging to the second temporal group. */ private static ListMultimap<Integer, SearchGroupItem> getTemporalGroups(SearchGroup searchGroup) { ListMultimap<Integer, SearchGroupItem> itemMap = ArrayListMultimap.create(); searchGroup .getItems() .forEach( item -> { from(temporalGroupNull()) .test(item) .throwException( "Bad Request: search group item temporal group {0} is not valid.", item.getTemporalGroup()); itemMap.put(item.getTemporalGroup(), item); }); from(notZeroAndNotOne()) .test(itemMap) .throwException( "Bad Request: Search Group Items must provided for 2 different temporal groups(0 or 1)."); return itemMap; } /** Helper method to build blood pressure sql. */ private static String processBloodPressureSql( Map<String, QueryParameterValue> queryParams, List<Attribute> attributes) { StringBuilder sqlBuilder = new StringBuilder(); for (Attribute attribute : attributes) { if (!AttrName.ANY.equals(attribute.getName())) { // this makes an assumption that the UI adds systolic attribute first. Otherwise we will // have to hard code the conceptId which is not optimal. String sqlTemplate = sqlBuilder.toString().contains("systolic") ? 
DIASTOLIC_SQL : SYSTOLIC_SQL; sqlBuilder.append( String.format( sqlTemplate, OperatorUtils.getSqlOperator(attribute.getOperator()), getOperandsExpression(queryParams, attribute))); } } return sqlBuilder.toString(); } private static String processAttributeSql( Map<String, QueryParameterValue> queryParams, SearchParameter parameter) { parameter.getAttributes().forEach(attr -> validateAttribute(attr)); String numsParam; String catsParam; String versionParam; List<Long> conceptIds = parameter.getAttributes().stream() .filter(attr -> attr.getConceptId() != null) .map(Attribute::getConceptId) .collect(Collectors.toList()); List<Attribute> cats = parameter.getAttributes().stream() .filter(attr -> attr.getName().equals(AttrName.CAT)) .collect(Collectors.toList()); List<Attribute> nums = parameter.getAttributes().stream() .filter(attr -> attr.getName().equals(AttrName.NUM)) .collect(Collectors.toList()); List<Attribute> any = parameter.getAttributes().stream() .filter(attr -> attr.getName().equals(AttrName.ANY)) .collect(Collectors.toList()); List<Attribute> versions = parameter.getAttributes().stream() .filter(attr -> attr.getName().equals(AttrName.SURVEY_VERSION_CONCEPT_ID)) .collect(Collectors.toList()); String standardParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(parameter.getStandard() ? 1 : 0)); String conceptIdParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array( conceptIds.isEmpty() ? new Long[] {parameter.getConceptId()} : conceptIds.toArray(new Long[0]), Long.class)); StringBuilder sqlBuilder = new StringBuilder(String.format(STANDARD_OR_SOURCE_SQL, standardParam, conceptIdParam)); if (!nums.isEmpty()) { if (!conceptIds.isEmpty()) { // attribute.conceptId is unique to blood pressure attributes // this indicates we need to build a blood pressure sql statement sqlBuilder.append(processBloodPressureSql(queryParams, parameter.getAttributes())); } else { String parens = cats.isEmpty() ? 
"" : "("; sqlBuilder.append( String.format( AND + parens + VALUE_AS_NUMBER, OperatorUtils.getSqlOperator(nums.get(0).getOperator()), getOperandsExpression(queryParams, nums.get(0)))); } } if (!cats.isEmpty()) { String andOrSql = nums.isEmpty() ? AND : OR; String parens = nums.isEmpty() ? "" : ")"; catsParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array( cats.get(0).getOperands().stream().map(Long::parseLong).toArray(Long[]::new), Long.class)); String catsSql = Domain.SURVEY.toString().equals(parameter.getDomain()) ? VALUE_SOURCE_CONCEPT_ID : VALUE_AS_CONCEPT_ID; sqlBuilder.append( String.format( andOrSql + catsSql + parens, OperatorUtils.getSqlOperator(cats.get(0).getOperator()), catsParam)); } if (!versions.isEmpty()) { versionParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array( versions.get(0).getOperands().stream().map(Long::parseLong).toArray(Long[]::new), Long.class)); sqlBuilder.append( String.format( SOURCE_CONCEPT_SURVEY_ID, OperatorUtils.getSqlOperator(versions.get(0).getOperator()), versionParam)); } if (!any.isEmpty() && parameter.getDomain().equals(Domain.SURVEY.toString())) { sqlBuilder.append(VALUE_AS_NUMBER_IS_NOT_NULL); } return sqlBuilder.toString(); } /** Helper method to build the operand sql expression. 
*/ private static String getOperandsExpression( Map<String, QueryParameterValue> queryParams, Attribute attribute) { String operandsParam1 = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.float64(new Double(attribute.getOperands().get(0)))); String valueExpression; if (attribute.getOperator().equals(Operator.BETWEEN)) { String operandsParam2 = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.float64(new Double(attribute.getOperands().get(1)))); valueExpression = operandsParam1 + AND + operandsParam2; } else { valueExpression = operandsParam1; } return valueExpression; } /** Collect all child nodes per specified search parameters. */ private static Set<Long> childConceptIds( Map<SearchParameter, Set<Long>> criteriaLookup, List<SearchParameter> params) { Set<Long> out = Sets.newHashSet(); for (SearchParameter param : params) { if (param.getGroup() || param.getAncestorData()) { out.addAll(criteriaLookup.get(param)); } if (param.getConceptId() != null) { // not all SearchParameter have a concept id, so attributes/modifiers // are used to find matches in those scenarios. out.add(param.getConceptId()); } } return out; } /** Helper method to build modifier sql if needed. */ private static String buildModifierSql( String baseSql, Map<String, QueryParameterValue> queryParams, List<Modifier> modifiers) { validateModifiers(modifiers); String ageDateAndEncounterSql = getAgeDateAndEncounterSql(queryParams, modifiers); // Number of Occurrences has to be last because of the group by String occurrenceSql = buildOccurrencesSql(queryParams, getModifier(modifiers, ModifierType.NUM_OF_OCCURRENCES)); return String.format(MODIFIER_SQL_TEMPLATE, baseSql + ageDateAndEncounterSql) + occurrenceSql; } /** * Helper method to build all modifiers together except occurrences since it has to be last * because of the group by. 
*/ private static String getAgeDateAndEncounterSql( Map<String, QueryParameterValue> queryParams, List<Modifier> modifiers) { List<Modifier> ageDateAndEncounterModifiers = new ArrayList<>(); ageDateAndEncounterModifiers.add(getModifier(modifiers, ModifierType.AGE_AT_EVENT)); ageDateAndEncounterModifiers.add(getModifier(modifiers, ModifierType.EVENT_DATE)); ageDateAndEncounterModifiers.add(getModifier(modifiers, ModifierType.ENCOUNTERS)); StringBuilder modifierSql = new StringBuilder(); for (Modifier modifier : ageDateAndEncounterModifiers) { if (modifier == null) { continue; } List<String> modifierParamList = new ArrayList<>(); for (String operand : modifier.getOperands()) { String modifierParameter = QueryParameterUtil.addQueryParameterValue( queryParams, (isAgeAtEvent(modifier) || isEncounters(modifier)) ? QueryParameterValue.int64(new Long(operand)) : QueryParameterValue.date(operand)); modifierParamList.add(modifierParameter); } if (isAgeAtEvent(modifier)) { modifierSql.append(AGE_AT_EVENT_SQL_TEMPLATE); modifierSql .append(OperatorUtils.getSqlOperator(modifier.getOperator())) .append(" ") .append(String.join(AND, modifierParamList)) .append("\n"); } else if (isEncounters(modifier)) { modifierSql.append(ENCOUNTERS_SQL_TEMPLATE); modifierSql .append(OperatorUtils.getSqlOperator(modifier.getOperator())) .append(" (") .append(modifierParamList.get(0)) .append(")\n"); } else { modifierSql.append(EVENT_DATE_SQL_TEMPLATE); modifierSql .append(OperatorUtils.getSqlOperator(modifier.getOperator())) .append(" ") .append(String.join(AND, modifierParamList)) .append("\n"); } } return modifierSql.toString(); } /** Helper method to build occurrences modifier sql. 
*/ private static String buildOccurrencesSql( Map<String, QueryParameterValue> queryParams, Modifier occurrences) { StringBuilder modifierSql = new StringBuilder(); if (occurrences != null) { List<String> modifierParamList = new ArrayList<>(); for (String operand : occurrences.getOperands()) { String modifierParameter = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(new Long(operand))); modifierParamList.add(modifierParameter); } modifierSql .append(OCCURRENCES_SQL_TEMPLATE) .append(OperatorUtils.getSqlOperator(occurrences.getOperator())) .append(" ") .append(String.join(AND, modifierParamList)) .append("\n"); } return modifierSql.toString(); } /** Add source or standard concept ids and set params * */ private static void addParamValueAndFormat( String domain, Map<String, QueryParameterValue> queryParams, Set<SearchParameter> searchParameters, List<String> queryParts, int standardOrSource) { if (!searchParameters.isEmpty()) { String standardOrSourceParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(standardOrSource)); List<Long> conceptIds = searchParameters.stream().map(SearchParameter::getConceptId).collect(Collectors.toList()); Map<Boolean, List<SearchParameter>> parentsAndChildren = searchParameters.stream().collect(Collectors.partitioningBy(SearchParameter::getGroup)); List<Long> parents = parentsAndChildren.get(true).stream() .map(SearchParameter::getConceptId) .collect(Collectors.toList()); if (!parents.isEmpty() || Domain.DRUG.toString().equals(domain)) { String domainParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.string(domain)); String rankParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.string("%[" + domain.toLowerCase() + "_rank1]%")); String conceptIdsParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array(conceptIds.toArray(new Long[0]), Long.class)); // Lookup child nodes 
queryParts.add( String.format( Domain.DRUG.toString().equals(domain) ? DRUG_SQL : PARENT_STANDARD_OR_SOURCE_SQL, standardOrSourceParam, domainParam, standardOrSourceParam, conceptIdsParam, rankParam, domainParam, standardOrSourceParam)); } else { // Children only String conceptIdsParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array(conceptIds.toArray(new Long[0]), Long.class)); queryParts.add( String.format(STANDARD_OR_SOURCE_SQL, standardOrSourceParam, conceptIdsParam)); } } } /** Helper method to return a modifier. */ private static Modifier getModifier(List<Modifier> modifiers, ModifierType modifierType) { List<Modifier> modifierList = modifiers.stream() .filter(modifier -> modifier.getName().equals(modifierType)) .collect(Collectors.toList()); if (modifierList.isEmpty()) { return null; } return modifierList.get(0); } private static boolean isAgeAtEvent(Modifier modifier) { return modifier.getName().equals(ModifierType.AGE_AT_EVENT); } private static boolean isEncounters(Modifier modifier) { return modifier.getName().equals(ModifierType.ENCOUNTERS); } /** Validate attributes */ private static void validateAttribute(Attribute attr) { if (!AttrName.ANY.equals(attr.getName())) { from(operatorNull()) .test(attr) .throwException("Bad Request: attribute operator {0} is not valid.", attr.getOperator()); from(operandsEmpty()).test(attr).throwException("Bad Request: attribute operands are empty."); from(notBetweenAndNotInOperator().and(operandsNotOne())) .test(attr) .throwException( "Bad Request: attribute {0} must have one operand when using the {1} operator.", attr.getName().toString(), attr.getOperator().toString()); from(betweenOperator().and(operandsNotTwo())) .test(attr) .throwException( "Bad Request: attribute {0} can only have 2 operands when using the {1} operator", attr.getName().toString(), attr.getOperator().toString()); from(operandsNotNumbers()) .test(attr) .throwException( "Bad Request: attribute {0} operands must be 
numeric.", attr.getName().toString()); } } private static void validateModifiers(List<Modifier> modifiers) { modifiers.forEach( modifier -> { from(operatorNull()) .test(modifier) .throwException( "Bad Request: modifier operator {0} is not valid.", modifier.getOperator()); from(operandsEmpty()) .test(modifier) .throwException("Bad Request: modifier operands are empty."); from(notBetweenAndNotInOperator().and(operandsNotOne())) .test(modifier) .throwException( "Bad Request: modifier {0} must have one operand when using the {1} operator.", modifier.getName().toString(), modifier.getOperator().toString()); from(betweenOperator().and(operandsNotTwo())) .test(modifier) .throwException( "Bad Request: modifier {0} can only have 2 operands when using the {1} operator", modifier.getName().toString(), modifier.getOperator().toString()); if (ModifierType.EVENT_DATE.equals(modifier.getName())) { from(operandsNotDates()) .test(modifier) .throwException( "Bad Request: modifier {0} must be a valid date.", modifier.getName().toString()); } else { from(operandsNotNumbers()) .test(modifier) .throwException( "Bad Request: modifier {0} operands must be numeric.", modifier.getName().toString()); } }); } private static boolean hasPhysicalMeasurementData(SearchGroupItem searchGroupItem) { return searchGroupItem.getSearchParameters().size() == 1 && searchGroupItem.getSearchParameters().stream() .allMatch( sp -> Domain.PHYSICAL_MEASUREMENT.toString().equals(sp.getDomain()) && sp.getConceptId() == null && sp.getAttributes().isEmpty()); } }
api/src/main/java/org/pmiops/workbench/cohortbuilder/SearchGroupItemQueryBuilder.java
package org.pmiops.workbench.cohortbuilder; import static org.pmiops.workbench.cohortbuilder.util.Validation.from; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.betweenOperator; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.notBetweenAndNotInOperator; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.notZeroAndNotOne; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsEmpty; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsNotDates; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsNotNumbers; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsNotOne; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operandsNotTwo; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.operatorNull; import static org.pmiops.workbench.cohortbuilder.util.ValidationPredicates.temporalGroupNull; import com.google.api.client.util.Sets; import com.google.cloud.bigquery.QueryParameterValue; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ListMultimap; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.pmiops.workbench.exceptions.BadRequestException; import org.pmiops.workbench.model.AttrName; import org.pmiops.workbench.model.Attribute; import org.pmiops.workbench.model.CriteriaType; import org.pmiops.workbench.model.Domain; import org.pmiops.workbench.model.Modifier; import org.pmiops.workbench.model.ModifierType; import org.pmiops.workbench.model.Operator; import org.pmiops.workbench.model.SearchGroup; import org.pmiops.workbench.model.SearchGroupItem; import 
org.pmiops.workbench.model.SearchParameter; import org.pmiops.workbench.model.TemporalMention; import org.pmiops.workbench.model.TemporalTime; import org.pmiops.workbench.utils.OperatorUtils; /** SearchGroupItemQueryBuilder builds BigQuery queries for search group items. */ public final class SearchGroupItemQueryBuilder { private static final int STANDARD = 1; private static final int SOURCE = 0; private static final ImmutableMap<AttrName, String> AGE_COLUMN_SQL_MAP = ImmutableMap.of( AttrName.AGE, "DATE_DIFF(CURRENT_DATE,dob, YEAR) - IF(EXTRACT(MONTH FROM dob)*100 + EXTRACT(DAY FROM dob) > EXTRACT(MONTH FROM CURRENT_DATE)*100 + EXTRACT(DAY FROM CURRENT_DATE),1,0)", AttrName.AGE_AT_CONSENT, "age_at_consent", AttrName.AGE_AT_CDR, "age_at_cdr"); private static final ImmutableMap<CriteriaType, String> DEMO_COLUMN_SQL_MAP = ImmutableMap.of( CriteriaType.RACE, "race_concept_id", CriteriaType.GENDER, "gender_concept_id", CriteriaType.SEX, "sex_at_birth_concept_id", CriteriaType.ETHNICITY, "ethnicity_concept_id"); private static final ImmutableList<Domain> SOURCE_STANDARD_DOMAINS = ImmutableList.of(Domain.CONDITION, Domain.PROCEDURE); // sql parts to help construct BigQuery sql statements private static final String OR = " or "; private static final String AND = " and "; private static final String UNION_TEMPLATE = "union all\n"; private static final String DESC = " desc"; private static final String BASE_SQL = "select distinct person_id, entry_date, concept_id\n" + "from `${projectId}.${dataSetId}.cb_search_all_events`\n" + "where "; private static final String STANDARD_SQL = "is_standard = %s"; private static final String CONCEPT_ID_UNNEST_SQL = "concept_id in unnest(%s)"; private static final String CONCEPT_ID_IN_SQL = "concept_id in"; private static final String STANDARD_OR_SOURCE_SQL = STANDARD_SQL + AND + CONCEPT_ID_UNNEST_SQL + "\n"; public static final String CHILD_LOOKUP_SQL = " (select distinct c.concept_id\n" + "from `${projectId}.${dataSetId}.cb_criteria` c\n" 
+ "join (select cast(cr.id as string) as id\n" + "from `${projectId}.${dataSetId}.cb_criteria` cr\n" + "where domain_id = %s\n" + "and is_standard = %s\n" + "and concept_id in unnest(%s)\n" + "and is_selectable = 1\n" + "and full_text like %s) a\n" + "on (c.path like concat('%%.', a.id, '.%%') or c.path like concat('%%.', a.id) or c.path like concat(a.id, '.%%'))\n" + "where domain_id = %s\n" + "and is_standard = %s\n" + "and is_selectable = 1)"; public static final String DRUG_CHILD_LOOKUP_SQL = " (select distinct ca.descendant_id\n" + "from `${projectId}.${dataSetId}.cb_criteria_ancestor` ca\n" + "join (select distinct c.concept_id\n" + "from `${projectId}.${dataSetId}.cb_criteria` c\n" + "join (select cast(cr.id as string) as id\n" + "from `${projectId}.${dataSetId}.cb_criteria` cr\n" + "where domain_id = %s\n" + "and is_standard = %s\n" + "and concept_id in unnest(%s)\n" + "and is_selectable = 1\n" + "and full_text like %s) a\n" + "on (c.path like concat('%%.', a.id, '.%%') or c.path like concat('%%.', a.id))\n" + "where domain_id = %s\n" + "and is_standard = %s\n" + "and is_selectable = 1) b on (ca.ancestor_id = b.concept_id))"; private static final String PARENT_STANDARD_OR_SOURCE_SQL = STANDARD_SQL + AND + CONCEPT_ID_IN_SQL + CHILD_LOOKUP_SQL; private static final String DRUG_SQL = STANDARD_SQL + AND + CONCEPT_ID_IN_SQL + DRUG_CHILD_LOOKUP_SQL; private static final String VALUE_AS_NUMBER = " value_as_number %s %s"; private static final String VALUE_AS_NUMBER_IS_NOT_NULL = " and value_as_number is not null"; private static final String VALUE_AS_CONCEPT_ID = " value_as_concept_id %s unnest(%s)"; private static final String VALUE_SOURCE_CONCEPT_ID = " value_source_concept_id %s unnest(%s)"; private static final String SOURCE_CONCEPT_SURVEY_ID = " and survey_version_concept_id %s unnest(%s)"; private static final String SYSTOLIC_SQL = " and systolic %s %s"; private static final String DIASTOLIC_SQL = " and diastolic %s %s"; // sql parts to help construct 
Temporal BigQuery sql private static final String SAME_ENC = "temp1.person_id = temp2.person_id and temp1.visit_occurrence_id = temp2.visit_occurrence_id\n"; private static final String X_DAYS_BEFORE = "temp1.person_id = temp2.person_id and temp1.entry_date <= DATE_SUB(temp2.entry_date, INTERVAL %s DAY)\n"; private static final String X_DAYS_AFTER = "temp1.person_id = temp2.person_id and temp1." + "entry_date >= DATE_ADD(temp2.entry_date, INTERVAL %s DAY)\n"; private static final String WITHIN_X_DAYS_OF = "temp1.person_id = temp2.person_id and temp1.entry_date between " + "DATE_SUB(temp2.entry_date, INTERVAL %s DAY) and DATE_ADD(temp2.entry_date, INTERVAL %s DAY)\n"; private static final String TEMPORAL_EXIST = "select temp1.person_id\n" + "from (%s) temp1\n" + "where exists (select 1\n" + "from (%s) temp2\n" + "where (%s))\n"; private static final String TEMPORAL_JOIN = "select temp1.person_id\n" + "from (%s) temp1\n" + "join (select person_id, visit_occurrence_id, entry_date\n" + "from (%s)\n" + ") temp2 on (%s)\n"; private static final String TEMPORAL_SQL = "select person_id, visit_occurrence_id, entry_date%s\n" + "from `${projectId}.${dataSetId}.cb_search_all_events`\n" + "where %s\n" + "and person_id in (%s)\n"; private static final String RANK_1_SQL = ", rank() over (partition by person_id order by entry_date%s) rn"; private static final String TEMPORAL_RANK_1_SQL = "select person_id, visit_occurrence_id, entry_date\n" + "from (%s) a\n" + "where rn = 1\n"; // sql parts to help construct Modifiers BigQuery sql private static final String MODIFIER_SQL_TEMPLATE = "select criteria.person_id from (%s) criteria\n"; private static final String OCCURRENCES_SQL_TEMPLATE = "group by criteria.person_id, criteria.concept_id\n" + "having count(criteria.person_id) "; private static final String AGE_AT_EVENT_SQL_TEMPLATE = " and age_at_event "; private static final String EVENT_DATE_SQL_TEMPLATE = " and entry_date "; private static final String ENCOUNTERS_SQL_TEMPLATE = " 
and visit_concept_id "; // sql parts to help construct demographic BigQuery sql private static final String DEC_SQL = "exists (\n" + "SELECT 'x' FROM `${projectId}.${dataSetId}.death` d\n" + "where d.person_id = p.person_id)\n"; private static final String DEMO_BASE = "select person_id\n" + "from `${projectId}.${dataSetId}.person` p\nwhere\n"; private static final String AGE_SQL = "select person_id\n" + "from `${projectId}.${dataSetId}.cb_search_person` p\nwhere %s %s %s\n"; private static final String AGE_DEC_SQL = "and not " + DEC_SQL; private static final String DEMO_IN_SQL = "%s in unnest(%s)\n"; private static final String HAS_FITBIT_SQL = "select person_id\n" + "from `${projectId}.${dataSetId}.cb_search_person` p\nwhere has_fitbit = 1\n"; private static final String HAS_PM_DATA_SQL = "select person_id\n" + "from `${projectId}.${dataSetId}.cb_search_person` p\nwhere has_physical_measurement_data = 1\n"; private static final String WHOLE_GENOME_VARIANT_SQL = "select person_id\n" + "from `${projectId}.${dataSetId}.cb_search_person` p\nwhere has_whole_genome_variant = 1\n"; private static final String CB_SEARCH_ALL_EVENTS_WHERE = "select person_id from `${projectId}.${dataSetId}.cb_search_all_events`\nwhere "; private static final String PERSON_ID_IN = "person_id in ("; /** Build the inner most sql using search parameters, modifiers and attributes. 
*/ public static void buildQuery( Map<String, QueryParameterValue> queryParams, List<String> queryParts, SearchGroup searchGroup) { if (searchGroup.getTemporal()) { // build the outer temporal sql statement String query = buildOuterTemporalQuery(queryParams, searchGroup); queryParts.add(query); } else { for (SearchGroupItem searchGroupItem : searchGroup.getItems()) { // build regular sql statement String query = buildBaseQuery(queryParams, searchGroupItem, searchGroup.getMention()); queryParts.add(query); } } } /** Build the inner most sql */ private static String buildBaseQuery( Map<String, QueryParameterValue> queryParams, SearchGroupItem searchGroupItem, TemporalMention mention) { Set<SearchParameter> standardSearchParameters = new HashSet<>(); Set<SearchParameter> sourceSearchParameters = new HashSet<>(); List<String> queryParts = new ArrayList<>(); Domain domain = Domain.fromValue(searchGroupItem.getType()); // When building sql for demographics - we query against the person table if (Domain.PERSON.equals(domain)) { return buildDemoSql(queryParams, searchGroupItem); } if (Domain.FITBIT.equals(domain)) { return HAS_FITBIT_SQL; } if (Domain.WHOLE_GENOME_VARIANT.equals(domain)) { return WHOLE_GENOME_VARIANT_SQL; } if (hasPhysicalMeasurementData(searchGroupItem)) { return HAS_PM_DATA_SQL; } // Otherwise build sql against flat denormalized search table for (SearchParameter param : searchGroupItem.getSearchParameters()) { if (param.getAttributes().isEmpty()) { if (param.getStandard()) { standardSearchParameters.add(param); } else { sourceSearchParameters.add(param); } } else { queryParts.add(processAttributeSql(queryParams, param)); } } addParamValueAndFormat( domain.toString(), queryParams, standardSearchParameters, queryParts, STANDARD); addParamValueAndFormat( domain.toString(), queryParams, sourceSearchParameters, queryParts, SOURCE); String queryPartsSql; if (SOURCE_STANDARD_DOMAINS.contains(domain) && !sourceSearchParameters.isEmpty() && 
!standardSearchParameters.isEmpty()) { queryPartsSql = PERSON_ID_IN + CB_SEARCH_ALL_EVENTS_WHERE + String.join(UNION_TEMPLATE + CB_SEARCH_ALL_EVENTS_WHERE, queryParts) + ")"; } else { queryPartsSql = "(" + String.join(OR + "\n", queryParts) + ")"; } // format the base sql with all query parts String baseSql = BASE_SQL + queryPartsSql; // build modifier sql if modifiers exists String modifiedSql = buildModifierSql(baseSql, queryParams, searchGroupItem.getModifiers()); // build the inner temporal sql if this search group item is temporal // otherwise return modifiedSql return buildInnerTemporalQuery( modifiedSql, queryPartsSql, queryParams, searchGroupItem.getModifiers(), mention); } /** Build sql statement for demographics */ private static String buildDemoSql( Map<String, QueryParameterValue> queryParams, SearchGroupItem searchGroupItem) { List<SearchParameter> parameters = searchGroupItem.getSearchParameters(); SearchParameter param = parameters.get(0); switch (CriteriaType.valueOf(param.getType())) { case AGE: List<String> queryParts = new ArrayList<>(); parameters.forEach( searchParameter -> { Attribute attribute = searchParameter.getAttributes().get(0); String ageNamedParameter = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(new Long(attribute.getOperands().get(0)))); if (attribute.getOperands().size() > 1) { String ageNamedParameter1 = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(new Long(attribute.getOperands().get(1)))); ageNamedParameter = ageNamedParameter + AND + ageNamedParameter1; } String ageSql = String.format( AGE_SQL, AGE_COLUMN_SQL_MAP.get(attribute.getName()), OperatorUtils.getSqlOperator(attribute.getOperator()), ageNamedParameter); queryParts.add( AttrName.AGE_AT_CONSENT.equals(attribute.getName()) ? 
ageSql : ageSql + AGE_DEC_SQL); }); return String.join(UNION_TEMPLATE, queryParts); case GENDER: case SEX: case ETHNICITY: case RACE: // Gender, Sex, Ethnicity and Race all share the same implementation Long[] conceptIds = searchGroupItem.getSearchParameters().stream() .map(SearchParameter::getConceptId) .toArray(Long[]::new); String namedParameter = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array(conceptIds, Long.class)); CriteriaType criteriaType = CriteriaType.fromValue(param.getType()); return DEMO_BASE + String.format(DEMO_IN_SQL, DEMO_COLUMN_SQL_MAP.get(criteriaType), namedParameter); case DECEASED: return DEMO_BASE + DEC_SQL; default: throw new BadRequestException( "Search unsupported for demographics type " + param.getType()); } } /** * Implementation of temporal CB queries. Please reference the following google doc for details: * https://docs.google.com/document/d/1OFrG7htm8gT0QOOvzHa7l3C3Qs0JnoENuK1TDAB_1A8 */ private static String buildInnerTemporalQuery( String modifiedSql, String conditionsSql, Map<String, QueryParameterValue> queryParams, List<Modifier> modifiers, TemporalMention mention) { if (mention == null) { return modifiedSql; } // if modifiers exists we need to add them again to the inner temporal sql conditionsSql = conditionsSql + getAgeDateAndEncounterSql(queryParams, modifiers); if (TemporalMention.ANY_MENTION.equals(mention)) { return String.format(TEMPORAL_SQL, "", conditionsSql, modifiedSql); } else if (TemporalMention.FIRST_MENTION.equals(mention)) { String rank1Sql = String.format(RANK_1_SQL, ""); String temporalSql = String.format(TEMPORAL_SQL, rank1Sql, conditionsSql, modifiedSql); return String.format(TEMPORAL_RANK_1_SQL, temporalSql); } String rank1Sql = String.format(RANK_1_SQL, DESC); String temporalSql = String.format(TEMPORAL_SQL, rank1Sql, conditionsSql, modifiedSql); return String.format(TEMPORAL_RANK_1_SQL, temporalSql); } /** * The temporal group functionality description is here: * 
https://docs.google.com/document/d/1OFrG7htm8gT0QOOvzHa7l3C3Qs0JnoENuK1TDAB_1A8 */ private static String buildOuterTemporalQuery( Map<String, QueryParameterValue> params, SearchGroup searchGroup) { List<String> temporalQueryParts1 = new ArrayList<>(); List<String> temporalQueryParts2 = new ArrayList<>(); ListMultimap<Integer, SearchGroupItem> temporalGroups = getTemporalGroups(searchGroup); for (Integer key : temporalGroups.keySet()) { List<SearchGroupItem> tempGroups = temporalGroups.get(key); // key of zero indicates belonging to the first temporal group // key of one indicates belonging to the second temporal group boolean isFirstGroup = key == 0; for (SearchGroupItem tempGroup : tempGroups) { String query = buildBaseQuery(params, tempGroup, searchGroup.getMention()); if (isFirstGroup) { temporalQueryParts1.add(query); } else { temporalQueryParts2.add(query); } } } String conditions = SAME_ENC; if (TemporalTime.WITHIN_X_DAYS_OF.equals(searchGroup.getTime())) { String parameterName = QueryParameterUtil.addQueryParameterValue( params, QueryParameterValue.int64(searchGroup.getTimeValue())); conditions = String.format(WITHIN_X_DAYS_OF, parameterName, parameterName); } else if (TemporalTime.X_DAYS_BEFORE.equals(searchGroup.getTime())) { String parameterName = QueryParameterUtil.addQueryParameterValue( params, QueryParameterValue.int64(searchGroup.getTimeValue())); conditions = String.format(X_DAYS_BEFORE, parameterName); } else if (TemporalTime.X_DAYS_AFTER.equals(searchGroup.getTime())) { String parameterName = QueryParameterUtil.addQueryParameterValue( params, QueryParameterValue.int64(searchGroup.getTimeValue())); conditions = String.format(X_DAYS_AFTER, parameterName); } return String.format( temporalQueryParts2.size() == 1 ? TEMPORAL_EXIST : TEMPORAL_JOIN, String.join(UNION_TEMPLATE, temporalQueryParts1), String.join(UNION_TEMPLATE, temporalQueryParts2), conditions); } /** * Helper method to collect search groups into 2 temporal groups. 
Key of zero indicates belonging * to the first temporal group. Key of one indicates belonging to the second temporal group. */ private static ListMultimap<Integer, SearchGroupItem> getTemporalGroups(SearchGroup searchGroup) { ListMultimap<Integer, SearchGroupItem> itemMap = ArrayListMultimap.create(); searchGroup .getItems() .forEach( item -> { from(temporalGroupNull()) .test(item) .throwException( "Bad Request: search group item temporal group {0} is not valid.", item.getTemporalGroup()); itemMap.put(item.getTemporalGroup(), item); }); from(notZeroAndNotOne()) .test(itemMap) .throwException( "Bad Request: Search Group Items must provided for 2 different temporal groups(0 or 1)."); return itemMap; } /** Helper method to build blood pressure sql. */ private static String processBloodPressureSql( Map<String, QueryParameterValue> queryParams, List<Attribute> attributes) { StringBuilder sqlBuilder = new StringBuilder(); for (Attribute attribute : attributes) { if (!AttrName.ANY.equals(attribute.getName())) { // this makes an assumption that the UI adds systolic attribute first. Otherwise we will // have to hard code the conceptId which is not optimal. String sqlTemplate = sqlBuilder.toString().contains("systolic") ? 
DIASTOLIC_SQL : SYSTOLIC_SQL; sqlBuilder.append( String.format( sqlTemplate, OperatorUtils.getSqlOperator(attribute.getOperator()), getOperandsExpression(queryParams, attribute))); } } return sqlBuilder.toString(); } private static String processAttributeSql( Map<String, QueryParameterValue> queryParams, SearchParameter parameter) { parameter.getAttributes().forEach(attr -> validateAttribute(attr)); String numsParam; String catsParam; String versionParam; List<Long> conceptIds = parameter.getAttributes().stream() .filter(attr -> attr.getConceptId() != null) .map(Attribute::getConceptId) .collect(Collectors.toList()); List<Attribute> cats = parameter.getAttributes().stream() .filter(attr -> attr.getName().equals(AttrName.CAT)) .collect(Collectors.toList()); List<Attribute> nums = parameter.getAttributes().stream() .filter(attr -> attr.getName().equals(AttrName.NUM)) .collect(Collectors.toList()); List<Attribute> any = parameter.getAttributes().stream() .filter(attr -> attr.getName().equals(AttrName.ANY)) .collect(Collectors.toList()); List<Attribute> versions = parameter.getAttributes().stream() .filter(attr -> attr.getName().equals(AttrName.SURVEY_VERSION_CONCEPT_ID)) .collect(Collectors.toList()); String standardParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(parameter.getStandard() ? 1 : 0)); String conceptIdParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array( conceptIds.isEmpty() ? new Long[] {parameter.getConceptId()} : conceptIds.toArray(new Long[0]), Long.class)); StringBuilder sqlBuilder = new StringBuilder(String.format(STANDARD_OR_SOURCE_SQL, standardParam, conceptIdParam)); if (!nums.isEmpty()) { if (!conceptIds.isEmpty()) { // attribute.conceptId is unique to blood pressure attributes // this indicates we need to build a blood pressure sql statement sqlBuilder.append(processBloodPressureSql(queryParams, parameter.getAttributes())); } else { String parens = cats.isEmpty() ? 
"" : "("; sqlBuilder.append( String.format( AND + parens + VALUE_AS_NUMBER, OperatorUtils.getSqlOperator(nums.get(0).getOperator()), getOperandsExpression(queryParams, nums.get(0)))); } } if (!cats.isEmpty()) { String andOrSql = nums.isEmpty() ? AND : OR; String parens = nums.isEmpty() ? "" : ")"; catsParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array( cats.get(0).getOperands().stream().map(Long::parseLong).toArray(Long[]::new), Long.class)); String catsSql = Domain.SURVEY.toString().equals(parameter.getDomain()) ? VALUE_SOURCE_CONCEPT_ID : VALUE_AS_CONCEPT_ID; sqlBuilder.append( String.format( andOrSql + catsSql + parens, OperatorUtils.getSqlOperator(cats.get(0).getOperator()), catsParam)); } if (!versions.isEmpty()) { versionParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array( versions.get(0).getOperands().stream().map(Long::parseLong).toArray(Long[]::new), Long.class)); sqlBuilder.append( String.format( SOURCE_CONCEPT_SURVEY_ID, OperatorUtils.getSqlOperator(versions.get(0).getOperator()), versionParam)); } if (!any.isEmpty() && parameter.getDomain().equals(Domain.SURVEY.toString())) { sqlBuilder.append(VALUE_AS_NUMBER_IS_NOT_NULL); } return sqlBuilder.toString(); } /** Helper method to build the operand sql expression. 
*/ private static String getOperandsExpression( Map<String, QueryParameterValue> queryParams, Attribute attribute) { String operandsParam1 = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.float64(new Double(attribute.getOperands().get(0)))); String valueExpression; if (attribute.getOperator().equals(Operator.BETWEEN)) { String operandsParam2 = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.float64(new Double(attribute.getOperands().get(1)))); valueExpression = operandsParam1 + AND + operandsParam2; } else { valueExpression = operandsParam1; } return valueExpression; } /** Collect all child nodes per specified search parameters. */ private static Set<Long> childConceptIds( Map<SearchParameter, Set<Long>> criteriaLookup, List<SearchParameter> params) { Set<Long> out = Sets.newHashSet(); for (SearchParameter param : params) { if (param.getGroup() || param.getAncestorData()) { out.addAll(criteriaLookup.get(param)); } if (param.getConceptId() != null) { // not all SearchParameter have a concept id, so attributes/modifiers // are used to find matches in those scenarios. out.add(param.getConceptId()); } } return out; } /** Helper method to build modifier sql if needed. */ private static String buildModifierSql( String baseSql, Map<String, QueryParameterValue> queryParams, List<Modifier> modifiers) { validateModifiers(modifiers); String ageDateAndEncounterSql = getAgeDateAndEncounterSql(queryParams, modifiers); // Number of Occurrences has to be last because of the group by String occurrenceSql = buildOccurrencesSql(queryParams, getModifier(modifiers, ModifierType.NUM_OF_OCCURRENCES)); return String.format(MODIFIER_SQL_TEMPLATE, baseSql + ageDateAndEncounterSql) + occurrenceSql; } /** * Helper method to build all modifiers together except occurrences since it has to be last * because of the group by. 
*/ private static String getAgeDateAndEncounterSql( Map<String, QueryParameterValue> queryParams, List<Modifier> modifiers) { List<Modifier> ageDateAndEncounterModifiers = new ArrayList<>(); ageDateAndEncounterModifiers.add(getModifier(modifiers, ModifierType.AGE_AT_EVENT)); ageDateAndEncounterModifiers.add(getModifier(modifiers, ModifierType.EVENT_DATE)); ageDateAndEncounterModifiers.add(getModifier(modifiers, ModifierType.ENCOUNTERS)); StringBuilder modifierSql = new StringBuilder(); for (Modifier modifier : ageDateAndEncounterModifiers) { if (modifier == null) { continue; } List<String> modifierParamList = new ArrayList<>(); for (String operand : modifier.getOperands()) { String modifierParameter = QueryParameterUtil.addQueryParameterValue( queryParams, (isAgeAtEvent(modifier) || isEncounters(modifier)) ? QueryParameterValue.int64(new Long(operand)) : QueryParameterValue.date(operand)); modifierParamList.add(modifierParameter); } if (isAgeAtEvent(modifier)) { modifierSql.append(AGE_AT_EVENT_SQL_TEMPLATE); modifierSql .append(OperatorUtils.getSqlOperator(modifier.getOperator())) .append(" ") .append(String.join(AND, modifierParamList)) .append("\n"); } else if (isEncounters(modifier)) { modifierSql.append(ENCOUNTERS_SQL_TEMPLATE); modifierSql .append(OperatorUtils.getSqlOperator(modifier.getOperator())) .append(" (") .append(modifierParamList.get(0)) .append(")\n"); } else { modifierSql.append(EVENT_DATE_SQL_TEMPLATE); modifierSql .append(OperatorUtils.getSqlOperator(modifier.getOperator())) .append(" ") .append(String.join(AND, modifierParamList)) .append("\n"); } } return modifierSql.toString(); } /** Helper method to build occurrences modifier sql. 
*/ private static String buildOccurrencesSql( Map<String, QueryParameterValue> queryParams, Modifier occurrences) { StringBuilder modifierSql = new StringBuilder(); if (occurrences != null) { List<String> modifierParamList = new ArrayList<>(); for (String operand : occurrences.getOperands()) { String modifierParameter = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(new Long(operand))); modifierParamList.add(modifierParameter); } modifierSql .append(OCCURRENCES_SQL_TEMPLATE) .append(OperatorUtils.getSqlOperator(occurrences.getOperator())) .append(" ") .append(String.join(AND, modifierParamList)) .append("\n"); } return modifierSql.toString(); } /** Add source or standard concept ids and set params * */ private static void addParamValueAndFormat( String domain, Map<String, QueryParameterValue> queryParams, Set<SearchParameter> searchParameters, List<String> queryParts, int standardOrSource) { if (!searchParameters.isEmpty()) { String standardOrSourceParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.int64(standardOrSource)); List<Long> conceptIds = searchParameters.stream().map(SearchParameter::getConceptId).collect(Collectors.toList()); Map<Boolean, List<SearchParameter>> parentsAndChildren = searchParameters.stream().collect(Collectors.partitioningBy(SearchParameter::getGroup)); List<Long> parents = parentsAndChildren.get(true).stream() .map(SearchParameter::getConceptId) .collect(Collectors.toList()); if (!parents.isEmpty() || Domain.DRUG.toString().equals(domain)) { String domainParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.string(domain)); String rankParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.string("%[" + domain.toLowerCase() + "_rank1]%")); String conceptIdsParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array(conceptIds.toArray(new Long[0]), Long.class)); // Lookup child nodes 
queryParts.add( String.format( Domain.DRUG.toString().equals(domain) ? DRUG_SQL : PARENT_STANDARD_OR_SOURCE_SQL, standardOrSourceParam, domainParam, standardOrSourceParam, conceptIdsParam, rankParam, domainParam, standardOrSourceParam)); } else { // Children only String conceptIdsParam = QueryParameterUtil.addQueryParameterValue( queryParams, QueryParameterValue.array(conceptIds.toArray(new Long[0]), Long.class)); queryParts.add( String.format(STANDARD_OR_SOURCE_SQL, standardOrSourceParam, conceptIdsParam)); } } } /** Helper method to return a modifier. */ private static Modifier getModifier(List<Modifier> modifiers, ModifierType modifierType) { List<Modifier> modifierList = modifiers.stream() .filter(modifier -> modifier.getName().equals(modifierType)) .collect(Collectors.toList()); if (modifierList.isEmpty()) { return null; } return modifierList.get(0); } private static boolean isAgeAtEvent(Modifier modifier) { return modifier.getName().equals(ModifierType.AGE_AT_EVENT); } private static boolean isEncounters(Modifier modifier) { return modifier.getName().equals(ModifierType.ENCOUNTERS); } /** Validate attributes */ private static void validateAttribute(Attribute attr) { if (!AttrName.ANY.equals(attr.getName())) { from(operatorNull()) .test(attr) .throwException("Bad Request: attribute operator {0} is not valid.", attr.getOperator()); from(operandsEmpty()).test(attr).throwException("Bad Request: attribute operands are empty."); from(notBetweenAndNotInOperator().and(operandsNotOne())) .test(attr) .throwException( "Bad Request: attribute {0} must have one operand when using the {1} operator.", attr.getName().toString(), attr.getOperator().toString()); from(betweenOperator().and(operandsNotTwo())) .test(attr) .throwException( "Bad Request: attribute {0} can only have 2 operands when using the {1} operator", attr.getName().toString(), attr.getOperator().toString()); from(operandsNotNumbers()) .test(attr) .throwException( "Bad Request: attribute {0} operands must be 
numeric.", attr.getName().toString()); } } private static void validateModifiers(List<Modifier> modifiers) { modifiers.forEach( modifier -> { from(operatorNull()) .test(modifier) .throwException( "Bad Request: modifier operator {0} is not valid.", modifier.getOperator()); from(operandsEmpty()) .test(modifier) .throwException("Bad Request: modifier operands are empty."); from(notBetweenAndNotInOperator().and(operandsNotOne())) .test(modifier) .throwException( "Bad Request: modifier {0} must have one operand when using the {1} operator.", modifier.getName().toString(), modifier.getOperator().toString()); from(betweenOperator().and(operandsNotTwo())) .test(modifier) .throwException( "Bad Request: modifier {0} can only have 2 operands when using the {1} operator", modifier.getName().toString(), modifier.getOperator().toString()); if (ModifierType.EVENT_DATE.equals(modifier.getName())) { from(operandsNotDates()) .test(modifier) .throwException( "Bad Request: modifier {0} must be a valid date.", modifier.getName().toString()); } else { from(operandsNotNumbers()) .test(modifier) .throwException( "Bad Request: modifier {0} operands must be numeric.", modifier.getName().toString()); } }); } private static boolean hasPhysicalMeasurementData(SearchGroupItem searchGroupItem) { return searchGroupItem.getSearchParameters().size() == 1 && searchGroupItem.getSearchParameters().stream() .allMatch( sp -> Domain.PHYSICAL_MEASUREMENT.toString().equals(sp.getDomain()) && sp.getConceptId() == null); } }
fixing PM (#4698)
api/src/main/java/org/pmiops/workbench/cohortbuilder/SearchGroupItemQueryBuilder.java
fixing PM (#4698)
<ide><path>pi/src/main/java/org/pmiops/workbench/cohortbuilder/SearchGroupItemQueryBuilder.java <ide> .allMatch( <ide> sp -> <ide> Domain.PHYSICAL_MEASUREMENT.toString().equals(sp.getDomain()) <del> && sp.getConceptId() == null); <add> && sp.getConceptId() == null <add> && sp.getAttributes().isEmpty()); <ide> } <ide> }
JavaScript
apache-2.0
7b3022eafe8f85fd5c9e409ca9174a8cee062c63
0
ShoppinPal/vend-nodejs-sdk
'use strict'; var _ = require('underscore'); var moment = require('moment'); var Promise = require('bluebird'); var request = require('request-promise'); //request.debug = true; var log = require('winston'); log.remove(log.transports.Console); log.add(log.transports.Console, {colorize: true, timestamp: false, level: 'debug'}); function RateLimitingError(e) { return e.statusCode == 429; } function AuthNError(e) { return e.statusCode == 401; } function AuthZError(e) { return e.statusCode == 403; } function ClientError(e) { return e.statusCode >= 400 && e.statusCode < 500; } var successHandler = function(response) { if(_.isArray(response)) { console.log('response is an array'); } else if(_.isObject(response)) { console.log('response is an object'); } else if(_.isString(response)) { console.log('response is a string'); try{ var responseObject = JSON.parse(response); //console.log(responseObject); return Promise.resolve(responseObject); } catch(error){ console.error('caught an error: ', error); throw error; } } else { console.log(response); } }; /** * TODO: Should we reuse the following library instead of rolling our own implementation here? 
* https://github.com/you21979/node-limit-request-promise * * @param bodyObject * @param connectionInfo - contains tokens and domainPrefix * @param retryCounter * @param callback * @returns {*|Parse.Promise} */ var retryWhenRateLimited = function(bodyObject, args, connectionInfo, callback, retryCounter) { if(retryCounter<3) { var retryAfter = 5*60*1000; // by default Vend will never block for more than 5 minutes retryAfter = Math.max(moment(bodyObject['retry-after']).diff(moment()), 0); //retryAfter = 5000; // for sanity testing counter increments quickly console.log('retry after: ' + retryAfter + ' ms'); return Promise.delay(retryAfter) .then(function() { console.log(retryAfter + ' ms have passed...'); return callback(args, connectionInfo, ++retryCounter); }); } }; var retryWhenAuthNFails = function(args, connectionInfo, callback, retryCounter) { if(retryCounter<3) { if ( !(connectionInfo.vendTokenService && connectionInfo.vendClientId && connectionInfo.vendClientSecret && connectionInfo.refreshToken) ) { return Promise.reject('missing required arguments for retryWhenAuthNFails()'); // throw e; // TODO: throw unknown errors but reject well known errors? } console.log('try to fetch a new access token'); return refreshAccessToken( //TODO: validate connectionInfo before using it for retries? 
connectionInfo.vendTokenService, connectionInfo.vendClientId, connectionInfo.vendClientSecret, connectionInfo.refreshToken, connectionInfo.domainPrefix ) .then(function(oauthInfo) { console.log('update connectionInfo w/ new token before using it again'); if (oauthInfo.access_token) { console.log('received new access_token: ' + oauthInfo.access_token); connectionInfo.accessToken = oauthInfo.access_token; } if (oauthInfo.refresh_token) { console.log('received new refresh_token: ' + oauthInfo.refresh_token); connectionInfo.refreshToken = oauthInfo.refresh_token; } console.log('retrying with new accessToken: ' + connectionInfo.accessToken); return callback(args, connectionInfo, ++retryCounter); }); } }; var sendRequest = function(options, args, connectionInfo, callback, retryCounter) { return request(options) .then(successHandler) .catch(RateLimitingError, function(e) { console.log('A RateLimitingError error like "429 Too Many Requests" happened: \n' + 'statusCode: ' + e.statusCode + '\n' + 'body: ' + e.response.body + '\n' //+ JSON.stringify(e.response.headers,null,2) ); var bodyObject = JSON.parse(e.response.body); console.log(bodyObject['retry-after']); console.log( moment(bodyObject['retry-after']).format('dddd, MMMM Do YYYY, h:mm:ss a ZZ') ); /*successHandler(e.response.body) .then(function(bodyObject){ console.log(bodyObject['retry-after']); console.log( moment(bodyObject['retry-after']).format('dddd, MMMM Do YYYY, h:mm:ss a ZZ') ); });*/ return retryWhenRateLimited(bodyObject, args, connectionInfo, callback, retryCounter); // TODO: how should a catch-block respond if there is a problem within the retry? 
}) .catch(AuthNError, function(e) { console.log('An AuthNError happened: \n' + 'statusCode: ' + e.statusCode + '\n' + 'body: ' + e.response.body + '\n' /*+ JSON.stringify(e.response.headers,null,2) + JSON.stringify(e,null,2)*/ ); return retryWhenAuthNFails(args, connectionInfo, callback, retryCounter); // TODO: how to prevent a throw or rejection from also stepping thru the other catch-blocks? }) .catch(ClientError, function(e) { console.log('A ClientError happened: \n' + e.statusCode + ' ' + e.response.body + '\n' /*+ JSON.stringify(e.response.headers,null,2) + JSON.stringify(e,null,2)*/ ); // TODO: add retry logic return Promise.reject(e.statusCode + ' ' + e.response.body); // TODO: throw unknown errors but reject well known errors? }) .catch(function(e) { console.error('vend.js - An unexpected error occurred: ', e); throw e; // TODO: throw unknown errors but reject well known errors? }); }; /** * If tokenService already has a domainPrefix set because the API consumer passed in a full URL * instead of a substitutable one ... then the replace acts as a no-op. * * @param tokenService * @param domain_prefix * @returns {*|XML|string|void} */ var getTokenUrl = function(tokenService, domain_prefix) { var tokenUrl = tokenService.replace(/\{DOMAIN_PREFIX\}/, domain_prefix); log.debug('token Url: '+ tokenUrl); return tokenUrl; }; // the API consumer will get the args and fill in the blanks // the SDK will pull out the non-empty values and execute the request var args = { products: { fetchById: function() { return { apiId: { required: true, value: undefined } }; }, fetch: function() { return { orderBy: { required: false, key: 'order_by', value: undefined // updated_at (default) | id | name }, orderDirection: { required: false, key: 'order_direction', value: undefined // ASC (default) | DESC //TODO: setup enumerations in javascript? 
}, since: { required: false, key: 'since', value: undefined }, active: { required: false, key: 'active', value: undefined // 0 (or no value) : returns only inactive products // 1 (or any other value) : returns only active products // TODO: can we embed a transformation here? // API consumer will set true or false or 0 or 1 as the value // but SDK will get the 0 or 1 value based on a transformation }, page: { required: false, key: 'page', value: undefined }, pageSize: { required: false, key: 'page_size', value: undefined }/*, domainPrefix: { required: true, value: undefined }, accessToken: { required: true, value: undefined }, retryCounter: { required: true, value: undefined }*/ }; } } }; var fetchProduct = function(args, connectionInfo, retryCounter) { if (!retryCounter) { retryCounter = 0; } else { console.log('retry # ' + retryCounter); } var path = '/api/products/' + args.apiId.value; // this is an undocumented implementation by Vend // the response has to be accessed like: result.products[0] // which is lame ... TODO: should we unwrap it within the SDK? var vendUrl = 'https://' + connectionInfo.domainPrefix + '.vendhq.com' + path; console.log('Requesting vend product ' + vendUrl); var authString = 'Bearer ' + connectionInfo.accessToken; log.debug('GET ' + vendUrl); log.debug('Authorization: ' + authString); // TODO: sensitive data ... do not log? var options = { url: vendUrl, headers: { 'Authorization': authString, 'Accept': 'application/json' } }; return sendRequest(options, args, connectionInfo, fetchProduct, retryCounter); }; var fetchProducts = function(args, connectionInfo, retryCounter) { if (!retryCounter) { retryCounter = 0; } else { console.log('retry # ' + retryCounter); } var path = '/api/products'; var vendUrl = 'https://' + connectionInfo.domainPrefix + '.vendhq.com' + path; var authString = 'Bearer ' + connectionInfo.accessToken; log.debug('GET ' + vendUrl); log.debug('Authorization: ' + authString); // TODO: sensitive data ... do not log? 
//var domainPrefix = this.domainPrefix; var options = { url: vendUrl, headers: { 'Authorization': authString, 'Accept': 'application/json' }, qs: { order_by: args.orderBy.value, order_direction: args.orderDirection.value, since: args.since.value, active: (args.active.value) ? 1 : 0, page: args.page.value, page_size: args.pageSize.value } }; if (args.page.value) { log.debug('Requesting product page ' + args.page.value); } return sendRequest(options, args, connectionInfo, fetchProducts, retryCounter); }; var createRegisterSale = function(body, connectionInfo, retryCounter) { if (!retryCounter) { retryCounter = 0; } else { console.log('retry # ' + retryCounter); } var path = '/api/register_sales'; var vendUrl = 'https://' + connectionInfo.domainPrefix + '.vendhq.com' + path; var authString = 'Bearer ' + connectionInfo.accessToken; log.debug('GET ' + vendUrl); log.debug('Authorization: ' + authString); // TODO: sensitive data ... do not log? var options = { method: 'POST', url: vendUrl, headers: { 'Authorization': authString, 'Content-Type': 'application/json', 'Accept': 'application/json' } }; return sendRequest(options, args, connectionInfo, fetchProducts, retryCounter); }; var getInitialAccessToken = function(tokenService, clientId, clientSecret, redirectUri, code, domainPrefix, state) { // TODO: tweak winston logs to prefix method signature (like breadcrumbs) when logging? 
log.debug('getInitialAccessToken - token_service: ' + tokenService); log.debug('getInitialAccessToken - client Id: ' + clientId); log.debug('getInitialAccessToken - client Secret: ' + clientSecret); log.debug('getInitialAccessToken - redirect Uri: ' + redirectUri); log.debug('getInitialAccessToken - code: ' + code); log.debug('getInitialAccessToken - domain_prefix: ' + domainPrefix); log.debug('getInitialAccessToken - state: ' + state); var tokenUrl = getTokenUrl(tokenService, domainPrefix); var options = { url: tokenUrl, headers: { 'Accept': 'application/json' }, form:{ 'grant_type': 'authorization_code', 'client_id': clientId, 'client_secret': clientSecret, 'code': code, 'redirect_uri': redirectUri, 'state': state } }; return request.post(options) .then(successHandler) .catch(RateLimitingError, function(e) { console.log('A RateLimitingError error like "429 Too Many Requests" happened: ' + e.statusCode + ' ' + e.response.body + '\n' + JSON.stringify(e.response.headers,null,2)); }) .catch(ClientError, function(e) { console.log('A ClientError happened: ' + e.statusCode + ' ' + e.response.body + '\n' /*+ JSON.stringify(e.response.headers,null,2) + JSON.stringify(e,null,2)*/ ); // TODO: add retry logic }) .catch(function(e) { console.error('An unexpected error occurred: ', e); }); }; var refreshAccessToken = function(tokenService, clientId, clientSecret, refreshToken, domainPrefix) { // TODO: tweak winston logs to prefix method signature (like breadcrumbs) when logging? 
log.debug('refreshAccessToken - token service: ' + tokenService); log.debug('refreshAccessToken - client Id: ' + clientId); log.debug('refreshAccessToken - client Secret: ' + clientSecret); log.debug('refreshAccessToken - refresh token: ' + refreshToken); log.debug('refreshAccessToken - domain prefix: ' + domainPrefix); if ( !(tokenService && clientId && clientSecret && refreshToken) ) { return Promise.reject('missing required arguments for refreshAccessToken()'); } var tokenUrl = getTokenUrl(tokenService, domainPrefix); var options = { url: tokenUrl, headers: { 'Accept': 'application/json' }, form:{ 'grant_type': 'refresh_token', 'client_id': clientId, 'client_secret': clientSecret, 'refresh_token': refreshToken } }; return request.post(options) .then(successHandler) .catch(RateLimitingError, function(e) { console.log('A RateLimitingError error like "429 Too Many Requests" happened: ' + e.statusCode + ' ' + e.response.body + '\n' + JSON.stringify(e.response.headers,null,2)); }) .catch(ClientError, function(e) { console.log('A ClientError happened: ' + e.statusCode + ' ' + e.response.body + '\n' /*+ JSON.stringify(e.response.headers,null,2) + JSON.stringify(e,null,2)*/ ); // TODO: add retry logic }) .catch(function(e) { console.error('An unexpected error occurred: ', e); }); }; /** * @param expiresAt - time unit from Vend is in unix epoch format * @returns {*} true if the the token will be considered as expired in 2 mins from now */ var hasAccessTokenExpired = function(expiresAt) { return (moment.unix(expiresAt).isBefore(moment().add(2, 'minutes'))); }; exports.hasAccessTokenExpired = hasAccessTokenExpired; exports.getInitialAccessToken = getInitialAccessToken; exports.refreshAccessToken = refreshAccessToken; exports.args = args; exports.products = { fetch: fetchProducts, fetchById: fetchProduct }; exports.sales = { create: createRegisterSale };
vend.js
'use strict'; var _ = require('underscore'); var moment = require('moment'); var Promise = require('bluebird'); var request = require('request-promise'); //request.debug = true; var log = require('winston'); log.remove(log.transports.Console); log.add(log.transports.Console, {colorize: true, timestamp: false, level: 'debug'}); function RateLimitingError(e) { return e.statusCode == 429; } function AuthNError(e) { return e.statusCode == 401; } function AuthZError(e) { return e.statusCode == 403; } function ClientError(e) { return e.statusCode >= 400 && e.statusCode < 500; } var successHandler = function(response) { if(_.isArray(response)) { console.log('response is an array'); } else if(_.isObject(response)) { console.log('response is an object'); } else if(_.isString(response)) { console.log('response is a string'); try{ var responseObject = JSON.parse(response); //console.log(responseObject); return Promise.resolve(responseObject); } catch(error){ console.error('caught an error: ', error); throw error; } } else { console.log(response); } }; /** * TODO: Should we reuse the following library instead of rolling our own implementation here? 
* https://github.com/you21979/node-limit-request-promise * * @param bodyObject * @param connectionInfo - contains tokens and domainPrefix * @param retryCounter * @param callback * @returns {*|Parse.Promise} */ var retryWhenRateLimited = function(bodyObject, args, connectionInfo, callback, retryCounter) { if(retryCounter<3) { var retryAfter = 5*60*1000; // by default Vend will never block for more than 5 minutes retryAfter = Math.max(moment(bodyObject['retry-after']).diff(moment()), 0); //retryAfter = 5000; // for sanity testing counter increments quickly console.log('retry after: ' + retryAfter + ' ms'); return Promise.delay(retryAfter) .then(function() { console.log(retryAfter + ' ms have passed...'); return callback(args, connectionInfo, ++retryCounter); }); } }; var retryWhenAuthNFails = function(args, connectionInfo, callback, retryCounter) { if(retryCounter<3) { if ( !(connectionInfo.vendTokenService && connectionInfo.vendClientId && connectionInfo.vendClientSecret && connectionInfo.refreshToken) ) { return Promise.reject('missing required arguments for retryWhenAuthNFails()'); // throw e; // TODO: throw unknown errors but reject well known errors? } console.log('try to fetch a new access token'); return refreshAccessToken( //TODO: validate connectionInfo before using it for retries? 
connectionInfo.vendTokenService, connectionInfo.vendClientId, connectionInfo.vendClientSecret, connectionInfo.refreshToken, connectionInfo.domainPrefix ) .then(function(oauthInfo) { console.log('update connectionInfo w/ new token before using it again'); if (oauthInfo.access_token) { console.log('received new access_token: ' + oauthInfo.access_token); connectionInfo.accessToken = oauthInfo.access_token; } if (oauthInfo.refresh_token) { console.log('received new refresh_token: ' + oauthInfo.refresh_token); connectionInfo.refreshToken = oauthInfo.refresh_token; } console.log('retrying with new accessToken: ' + connectionInfo.accessToken); return callback(args, connectionInfo, ++retryCounter); }); } }; var sendRequest = function(options, args, connectionInfo, callback, retryCounter) { return request(options) .then(successHandler) .catch(RateLimitingError, function(e) { console.log('A RateLimitingError error like "429 Too Many Requests" happened: \n' + 'statusCode: ' + e.statusCode + '\n' + 'body: ' + e.response.body + '\n' //+ JSON.stringify(e.response.headers,null,2) ); var bodyObject = JSON.parse(e.response.body); console.log(bodyObject['retry-after']); console.log( moment(bodyObject['retry-after']).format('dddd, MMMM Do YYYY, h:mm:ss a ZZ') ); /*successHandler(e.response.body) .then(function(bodyObject){ console.log(bodyObject['retry-after']); console.log( moment(bodyObject['retry-after']).format('dddd, MMMM Do YYYY, h:mm:ss a ZZ') ); });*/ return retryWhenRateLimited(bodyObject, args, connectionInfo, callback, retryCounter); // TODO: how should a catch-block respond if there is a problem within the retry? 
}) .catch(AuthNError, function(e) { console.log('An AuthNError happened: \n' + 'statusCode: ' + e.statusCode + '\n' + 'body: ' + e.response.body + '\n' /*+ JSON.stringify(e.response.headers,null,2) + JSON.stringify(e,null,2)*/ ); return retryWhenAuthNFails(args, connectionInfo, callback, retryCounter); // TODO: how to prevent a throw or rejection from also stepping thru the other catch-blocks? }) .catch(ClientError, function(e) { console.log('A ClientError happened: \n' + e.statusCode + ' ' + e.response.body + '\n' /*+ JSON.stringify(e.response.headers,null,2) + JSON.stringify(e,null,2)*/ ); // TODO: add retry logic return Promise.reject(e.statusCode + ' ' + e.response.body); // TODO: throw unknown errors but reject well known errors? }) .catch(function(e) { console.error('vend.js - An unexpected error occurred: ', e); throw e; // TODO: throw unknown errors but reject well known errors? }); }; /** * If tokenService already has a domainPrefix set because the API consumer passed in a full URL * instead of a substitutable one ... then the replace acts as a no-op. * * @param tokenService * @param domain_prefix * @returns {*|XML|string|void} */ var getTokenUrl = function(tokenService, domain_prefix) { var tokenUrl = tokenService.replace(/\{DOMAIN_PREFIX\}/, domain_prefix); log.debug('token Url: '+ tokenUrl); return tokenUrl; }; // the API consumer will get the args and fill in the blanks // the SDK will pull out the non-empty values and execute the request var args = { products: { fetchById: function() { return { apiId: { required: true, value: undefined } }; }, fetch: function() { return { orderBy: { required: false, key: 'order_by', value: undefined // updated_at (default) | id | name }, orderDirection: { required: false, key: 'order_direction', value: undefined // ASC (default) | DESC //TODO: setup enumerations in javascript? 
}, since: { required: false, key: 'since', value: undefined }, active: { required: false, key: 'active', value: undefined // 0 (or no value) : returns only inactive products // 1 (or any other value) : returns only active products // TODO: can we embed a transformation here? // API consumer will set true or false or 0 or 1 as the value // but SDK will get the 0 or 1 value based on a transformation }, page: { required: false, key: 'page', value: undefined }, pageSize: { required: false, key: 'page_size', value: undefined }/*, domainPrefix: { required: true, value: undefined }, accessToken: { required: true, value: undefined }, retryCounter: { required: true, value: undefined }*/ }; } } }; var fetchProduct = function(args, connectionInfo, retryCounter) { if (!retryCounter) { retryCounter = 0; } else { console.log('retry # ' + retryCounter); } var path = '/api/products/' + args.apiId.value; // this is an undocumented implementation by Vend // the response has to be accessed like: result.products[0] // which is lame ... TODO: should we unwrap it within the SDK? var vendUrl = 'https://' + connectionInfo.domainPrefix + '.vendhq.com' + path; console.log('Requesting vend product ' + vendUrl); var authString = 'Bearer ' + connectionInfo.accessToken; log.debug('GET ' + vendUrl); log.debug('Authorization: ' + authString); // TODO: sensitive data ... do not log? var options = { url: vendUrl, headers: { 'Authorization': authString, 'Accept': 'application/json' } }; return sendRequest(options, args, connectionInfo, fetchProduct, retryCounter); }; var fetchProducts = function(args, connectionInfo, retryCounter) { if (!retryCounter) { retryCounter = 0; } else { console.log('retry # ' + retryCounter); } var path = '/api/products'; var vendUrl = 'https://' + connectionInfo.domainPrefix + '.vendhq.com' + path; var authString = 'Bearer ' + connectionInfo.accessToken; log.debug('GET ' + vendUrl); log.debug('Authorization: ' + authString); // TODO: sensitive data ... do not log? 
//var domainPrefix = this.domainPrefix; var options = { url: vendUrl, headers: { 'Authorization': authString, 'Accept': 'application/json' }, qs: { order_by: args.orderBy.value, order_direction: args.orderDirection.value, since: args.since.value, active: (args.active.value) ? 1 : 0, page: args.page.value, page_size: args.pageSize.value } }; if (args.page.value) { log.debug('Requesting product page ' + args.page.value); } return sendRequest(options, args, connectionInfo, fetchProducts, retryCounter); }; var getInitialAccessToken = function(tokenService, clientId, clientSecret, redirectUri, code, domainPrefix, state) { // TODO: tweak winston logs to prefix method signature (like breadcrumbs) when logging? log.debug('getInitialAccessToken - token_service: ' + tokenService); log.debug('getInitialAccessToken - client Id: ' + clientId); log.debug('getInitialAccessToken - client Secret: ' + clientSecret); log.debug('getInitialAccessToken - redirect Uri: ' + redirectUri); log.debug('getInitialAccessToken - code: ' + code); log.debug('getInitialAccessToken - domain_prefix: ' + domainPrefix); log.debug('getInitialAccessToken - state: ' + state); var tokenUrl = getTokenUrl(tokenService, domainPrefix); var options = { url: tokenUrl, headers: { 'Accept': 'application/json' }, form:{ 'grant_type': 'authorization_code', 'client_id': clientId, 'client_secret': clientSecret, 'code': code, 'redirect_uri': redirectUri, 'state': state } }; return request.post(options) .then(successHandler) .catch(RateLimitingError, function(e) { console.log('A RateLimitingError error like "429 Too Many Requests" happened: ' + e.statusCode + ' ' + e.response.body + '\n' + JSON.stringify(e.response.headers,null,2)); }) .catch(ClientError, function(e) { console.log('A ClientError happened: ' + e.statusCode + ' ' + e.response.body + '\n' /*+ JSON.stringify(e.response.headers,null,2) + JSON.stringify(e,null,2)*/ ); // TODO: add retry logic }) .catch(function(e) { console.error('An unexpected error 
occurred: ', e); }); }; var refreshAccessToken = function(tokenService, clientId, clientSecret, refreshToken, domainPrefix) { // TODO: tweak winston logs to prefix method signature (like breadcrumbs) when logging? log.debug('refreshAccessToken - token service: ' + tokenService); log.debug('refreshAccessToken - client Id: ' + clientId); log.debug('refreshAccessToken - client Secret: ' + clientSecret); log.debug('refreshAccessToken - refresh token: ' + refreshToken); log.debug('refreshAccessToken - domain prefix: ' + domainPrefix); if ( !(tokenService && clientId && clientSecret && refreshToken) ) { return Promise.reject('missing required arguments for refreshAccessToken()'); } var tokenUrl = getTokenUrl(tokenService, domainPrefix); var options = { url: tokenUrl, headers: { 'Accept': 'application/json' }, form:{ 'grant_type': 'refresh_token', 'client_id': clientId, 'client_secret': clientSecret, 'refresh_token': refreshToken } }; return request.post(options) .then(successHandler) .catch(RateLimitingError, function(e) { console.log('A RateLimitingError error like "429 Too Many Requests" happened: ' + e.statusCode + ' ' + e.response.body + '\n' + JSON.stringify(e.response.headers,null,2)); }) .catch(ClientError, function(e) { console.log('A ClientError happened: ' + e.statusCode + ' ' + e.response.body + '\n' /*+ JSON.stringify(e.response.headers,null,2) + JSON.stringify(e,null,2)*/ ); // TODO: add retry logic }) .catch(function(e) { console.error('An unexpected error occurred: ', e); }); }; /** * @param expiresAt - time unit from Vend is in unix epoch format * @returns {*} true if the the token will be considered as expired in 2 mins from now */ var hasAccessTokenExpired = function(expiresAt) { return (moment.unix(expiresAt).isBefore(moment().add(2, 'minutes'))); }; exports.hasAccessTokenExpired = hasAccessTokenExpired; exports.getInitialAccessToken = getInitialAccessToken; exports.refreshAccessToken = refreshAccessToken; exports.args = args; exports.products = { 
fetch: fetchProducts, fetchById: fetchProduct };
add the ability to create register sale
vend.js
add the ability to create register sale
<ide><path>end.js <ide> return sendRequest(options, args, connectionInfo, fetchProducts, retryCounter); <ide> }; <ide> <add>var createRegisterSale = function(body, connectionInfo, retryCounter) { <add> if (!retryCounter) { <add> retryCounter = 0; <add> } else { <add> console.log('retry # ' + retryCounter); <add> } <add> <add> var path = '/api/register_sales'; <add> var vendUrl = 'https://' + connectionInfo.domainPrefix + '.vendhq.com' + path; <add> var authString = 'Bearer ' + connectionInfo.accessToken; <add> log.debug('GET ' + vendUrl); <add> log.debug('Authorization: ' + authString); // TODO: sensitive data ... do not log? <add> <add> var options = { <add> method: 'POST', <add> url: vendUrl, <add> headers: { <add> 'Authorization': authString, <add> 'Content-Type': 'application/json', <add> 'Accept': 'application/json' <add> } <add> }; <add> <add> return sendRequest(options, args, connectionInfo, fetchProducts, retryCounter); <add>}; <add> <ide> var getInitialAccessToken = function(tokenService, clientId, clientSecret, redirectUri, code, domainPrefix, state) { <ide> // TODO: tweak winston logs to prefix method signature (like breadcrumbs) when logging? <ide> log.debug('getInitialAccessToken - token_service: ' + tokenService); <ide> fetch: fetchProducts, <ide> fetchById: fetchProduct <ide> }; <add>exports.sales = { <add> create: createRegisterSale <add>};
Java
apache-2.0
2249d8ef622a2d1e09dd863a4244ca9dd0cf7ed1
0
da1z/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,youdonghai/intellij-community,da1z/intellij-community,allotria/intellij-community,signed/intellij-community,semonte/intellij-community,allotria/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,allotria/intellij-community,apixandru/intellij-community,xfournet/intellij-community,FHannes/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,apixandru/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,youdonghai/intellij-community,allotria/intellij-community,suncycheng/intellij-community,da1z/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,da1z/intellij-community,signed/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,asedunov/intellij-community,fitermay/intellij-community,semonte/intellij-community,fitermay/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,da1z/intellij-community,asedunov/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,signed/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,signed/inte
llij-community,da1z/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,FHannes/intellij-community,apixandru/intellij-community,ibinti/intellij-community,signed/intellij-community,semonte/intellij-community,signed/intellij-community,suncycheng/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,signed/intellij-community,mglukhikh/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,apixandru/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,FHannes/intellij-community,allotria/intellij-community,da1z/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,ibinti/intellij-community,da1z/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,fitermay/intellij-community,allotria/intellij-community,apixandru/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,xfournet/intellij-community,xfournet/intellij-community,allotria/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,ase
dunov/intellij-community,semonte/intellij-community,fitermay/intellij-community,asedunov/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,allotria/intellij-community,allotria/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,xfournet/intellij-community,ibinti/intellij-community,da1z/intellij-community,da1z/intellij-community,ibinti/intellij-community,FHannes/intellij-community,signed/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,FHannes/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,semonte/intellij-community,fitermay/intellij-community,FHannes/intellij-community,ibinti/intellij-community,signed/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,asedunov/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,allotria/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,xfournet/intellij-community,signed/intellij-community,xfournet/intelli
j-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,youdonghai/intellij-community
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.javaFX.fxml; import com.intellij.codeInsight.AnnotationUtil; import com.intellij.codeInsight.daemon.Validator; import com.intellij.codeInsight.daemon.impl.analysis.HighlightUtil; import com.intellij.codeInsight.daemon.impl.analysis.JavaGenericsUtil; import com.intellij.lang.ASTNode; import com.intellij.lang.xml.XMLLanguage; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.*; import com.intellij.psi.impl.source.PostprocessReformattingAspect; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.searches.ClassInheritorsSearch; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.*; import com.intellij.psi.xml.*; import com.intellij.util.Processor; import com.intellij.xml.XmlAttributeDescriptor; import com.intellij.xml.XmlElementDescriptor; import gnu.trove.THashMap; import gnu.trove.THashSet; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.javaFX.fxml.descriptors.JavaFxClassTagDescriptorBase; import 
org.jetbrains.plugins.javaFX.fxml.descriptors.JavaFxPropertyTagDescriptor; import org.jetbrains.plugins.javaFX.indexing.JavaFxControllerClassIndex; import java.util.*; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.stream.Collectors; /** * User: anna */ public class JavaFxPsiUtil { private static final Logger LOG = Logger.getInstance("#" + JavaFxPsiUtil.class.getName()); public static XmlProcessingInstruction createSingleImportInstruction(String qualifiedName, Project project) { final String importText = "<?import " + qualifiedName + "?>"; final PsiElement child = PsiFileFactory.getInstance(project).createFileFromText("a.fxml", XMLLanguage.INSTANCE, importText).getFirstChild(); return PsiTreeUtil.findChildOfType(child, XmlProcessingInstruction.class); } public static List<String> parseImports(XmlFile file) { return parseInstructions(file, "import"); } public static List<String> parseInjectedLanguages(XmlFile file) { return parseInstructions(file, "language"); } private static List<String> parseInstructions(XmlFile file, String instructionName) { final List<String> definedImports = new ArrayList<>(); final XmlDocument document = file.getDocument(); if (document != null) { final XmlProlog prolog = document.getProlog(); final Collection<XmlProcessingInstruction> instructions = new ArrayList<>(PsiTreeUtil.findChildrenOfType(prolog, XmlProcessingInstruction.class)); for (final XmlProcessingInstruction instruction : instructions) { final String instructionTarget = getInstructionTarget(instructionName, instruction); if (instructionTarget != null) { definedImports.add(instructionTarget); } } } return definedImports; } @Nullable public static String getInstructionTarget(String instructionName, XmlProcessingInstruction instruction) { final ASTNode node = instruction.getNode(); ASTNode xmlNameNode = node.findChildByType(XmlTokenType.XML_NAME); ASTNode importNode = node.findChildByType(XmlTokenType.XML_TAG_CHARACTERS); if 
(!(xmlNameNode == null || !instructionName.equals(xmlNameNode.getText()) || importNode == null)) { return importNode.getText(); } return null; } public static PsiClass findPsiClass(String name, PsiElement context) { final Project project = context.getProject(); if (!StringUtil.getShortName(name).equals(name)) { final PsiClass psiClass = JavaPsiFacade.getInstance(project).findClass(name, GlobalSearchScope.allScope(project)); if (psiClass != null) { return psiClass; } return findNestedPsiClass(name, context, project); } return findPsiClass(name, parseImports((XmlFile)context.getContainingFile()), context, project); } private static PsiClass findNestedPsiClass(String name, PsiElement context, Project project) { final int dotIndex = name.indexOf('.'); if (dotIndex > 0) { final String outerName = name.substring(0, dotIndex); final PsiClass outerClass = findPsiClass(outerName, parseImports((XmlFile)context.getContainingFile()), context, project); if (outerClass != null) { final List<String> nameChain = StringUtil.split(name, ".", true, false); final List<String> nestedNames = nameChain.subList(1, nameChain.size()); PsiClass aClass = outerClass; for (String nestedName : nestedNames) { aClass = aClass.findInnerClassByName(nestedName, true); if (aClass == null) return null; } return aClass; } } return null; } private static PsiClass findPsiClass(String name, List<String> imports, PsiElement context, Project project) { PsiClass psiClass = null; if (imports != null) { JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project); PsiFile file = context.getContainingFile(); for (String anImport : imports) { if (StringUtil.getShortName(anImport).equals(name)) { psiClass = psiFacade.findClass(anImport, file.getResolveScope()); } else if (StringUtil.endsWith(anImport, ".*")) { psiClass = psiFacade.findClass(StringUtil.trimEnd(anImport, "*") + name, file.getResolveScope()); } if (psiClass != null) { return psiClass; } } } return null; } public static void 
insertImportWhenNeeded(XmlFile xmlFile, String shortName, String qualifiedName) { if (shortName != null && qualifiedName != null && findPsiClass(shortName, xmlFile.getRootTag()) == null) { final XmlDocument document = xmlFile.getDocument(); if (document != null) { final XmlProcessingInstruction processingInstruction = createSingleImportInstruction(qualifiedName, xmlFile.getProject()); final XmlProlog prolog = document.getProlog(); if (prolog != null) { prolog.add(processingInstruction); } else { document.addBefore(processingInstruction, document.getRootTag()); } PostprocessReformattingAspect.getInstance(xmlFile.getProject()).doPostponedFormatting(xmlFile.getViewProvider()); } } } public static PsiClass getPropertyClass(PsiElement member) { final PsiClassType classType = getPropertyClassType(member); return classType != null ? classType.resolve() : null; } public static PsiClassType getPropertyClassType(PsiElement member) { return getPropertyClassType(member, JavaFxCommonNames.JAVAFX_BEANS_PROPERTY_OBJECT_PROPERTY); } public static PsiClassType getPropertyClassType(PsiElement member, final String superTypeFQN) { if (member instanceof PsiMember) { final PsiType type = PropertyUtil.getPropertyType((PsiMember)member); if (type instanceof PsiClassType) { final PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)type).resolveGenerics(); final PsiClass attributeClass = resolveResult.getElement(); if (attributeClass != null) { final PsiClass objectProperty = JavaPsiFacade.getInstance(attributeClass.getProject()) .findClass(superTypeFQN, attributeClass.getResolveScope()); if (objectProperty != null) { final PsiSubstitutor superClassSubstitutor = TypeConversionUtil .getClassSubstitutor(objectProperty, attributeClass, resolveResult.getSubstitutor()); if (superClassSubstitutor != null) { final PsiType propertyType = superClassSubstitutor.substitute(objectProperty.getTypeParameters()[0]); if (propertyType instanceof PsiClassType) { return (PsiClassType)propertyType; 
} } else { return (PsiClassType)type; } } } } } return null; } public static PsiMethod findStaticPropertySetter(String attributeName, XmlTag context) { final String packageName = StringUtil.getPackageName(attributeName); if (context != null && !StringUtil.isEmptyOrSpaces(packageName)) { final PsiClass classWithStaticProperty = findPsiClass(packageName, context); if (classWithStaticProperty != null) { return findStaticPropertySetter(attributeName, classWithStaticProperty); } } return null; } @Nullable public static PsiMethod findStaticPropertySetter(@NotNull String attributeName, @Nullable PsiClass classWithStaticProperty) { if (classWithStaticProperty == null) return null; final String setterName = PropertyUtil.suggestSetterName(StringUtil.getShortName(attributeName)); final PsiMethod[] setters = classWithStaticProperty.findMethodsByName(setterName, true); for (PsiMethod setter : setters) { if (setter.hasModifierProperty(PsiModifier.PUBLIC) && setter.hasModifierProperty(PsiModifier.STATIC) && setter.getParameterList().getParametersCount() == 2) { return setter; } } return null; } public static PsiMethod findPropertyGetter(@NotNull PsiClass psiClass, @Nullable String propertyName) { if (StringUtil.isEmpty(propertyName)) return null; PsiMethod getter = findPropertyGetter(psiClass, propertyName, null); if (getter != null) { return getter; } return findPropertyGetter(psiClass, propertyName, PsiType.BOOLEAN); } private static PsiMethod findPropertyGetter(final PsiClass psiClass, final String propertyName, final PsiType propertyType) { final String getterName = PropertyUtil.suggestGetterName(propertyName, propertyType); final PsiMethod[] getters = psiClass.findMethodsByName(getterName, true); for (PsiMethod getter : getters) { if (getter.hasModifierProperty(PsiModifier.PUBLIC) && !getter.hasModifierProperty(PsiModifier.STATIC) && PropertyUtil.isSimplePropertyGetter(getter)) { return getter; } } return null; } public static PsiMethod findObservablePropertyGetter(@NotNull 
PsiClass psiClass, @Nullable String propertyName) { if (StringUtil.isEmpty(propertyName)) return null; final PsiMethod[] getters = psiClass.findMethodsByName(propertyName + JavaFxCommonNames.PROPERTY_METHOD_SUFFIX, true); for (PsiMethod getter : getters) { if (getter.hasModifierProperty(PsiModifier.PUBLIC) && !getter.hasModifierProperty(PsiModifier.STATIC) && getter.getParameterList().getParametersCount() == 0 && InheritanceUtil.isInheritor(getter.getReturnType(), JavaFxCommonNames.JAVAFX_BEANS_VALUE_OBSERVABLE_VALUE)) { return getter; } } return null; } private static final Key<CachedValue<PsiClass>> INJECTED_CONTROLLER = Key.create("javafx.injected.controller"); private static final RecursionGuard ourGuard = RecursionManager.createGuard("javafx.controller"); public static PsiClass getControllerClass(final PsiFile containingFile) { if (containingFile instanceof XmlFile) { final XmlTag rootTag = ((XmlFile)containingFile).getRootTag(); final Project project = containingFile.getProject(); if (rootTag != null) { XmlAttribute attribute = rootTag.getAttribute(FxmlConstants.FX_CONTROLLER); if (attribute != null) { final PsiClass controllerClass = findControllerClass(containingFile, project, attribute); if (controllerClass != null) { return controllerClass; } } } final CachedValuesManager manager = CachedValuesManager.getManager(containingFile.getProject()); final PsiClass injectedControllerClass = manager.getCachedValue( containingFile, INJECTED_CONTROLLER, () -> computeInjectedControllerClass(containingFile), true); if (injectedControllerClass != null) { return injectedControllerClass; } if (rootTag != null && FxmlConstants.FX_ROOT.equals(rootTag.getName())) { final XmlAttribute rootTypeAttr = rootTag.getAttribute(FxmlConstants.TYPE); if (rootTypeAttr != null) { return findControllerClass(containingFile, project, rootTypeAttr); } } } return null; } private static PsiClass findControllerClass(PsiFile containingFile, Project project, XmlAttribute attribute) { final String 
attributeValue = attribute.getValue(); if (!StringUtil.isEmptyOrSpaces(attributeValue)) { final GlobalSearchScope customScope = GlobalSearchScope.projectScope(project).intersectWith(containingFile.getResolveScope()); return JavaPsiFacade.getInstance(project).findClass(attributeValue, customScope); } return null; } public static boolean isEventHandlerProperty(@NotNull XmlAttribute attribute) { final PsiClass tagClass = getTagClass(attribute.getParent()); return tagClass != null && getEventHandlerPropertyType(tagClass, attribute.getName()) != null; } @Nullable public static PsiClass getTagClass(@Nullable XmlAttributeValue xmlAttributeValue) { if (xmlAttributeValue != null) { final PsiElement parent = xmlAttributeValue.getParent(); if (parent instanceof XmlAttribute) { final XmlTag xmlTag = ((XmlAttribute)parent).getParent(); return getTagClass(xmlTag); } } return null; } @Nullable public static PsiClass getTagClass(@Nullable XmlTag xmlTag) { if (xmlTag != null) { final XmlElementDescriptor descriptor = xmlTag.getDescriptor(); if (descriptor != null) { final PsiElement declaration = descriptor.getDeclaration(); if (declaration instanceof PsiClass) { return (PsiClass)declaration; } } } return null; } @Nullable public static PsiElement getAttributeDeclaration(@Nullable XmlAttributeValue xmlAttributeValue) { if (xmlAttributeValue != null) { final PsiElement parent = xmlAttributeValue.getParent(); if (parent instanceof XmlAttribute) { final XmlAttributeDescriptor descriptor = ((XmlAttribute)parent).getDescriptor(); if (descriptor != null) { return descriptor.getDeclaration(); } } } return null; } public static boolean isVisibleInFxml(@NotNull PsiMember psiMember) { return psiMember.hasModifierProperty(PsiModifier.PUBLIC) || AnnotationUtil.isAnnotated(psiMember, JavaFxCommonNames.JAVAFX_FXML_ANNOTATION, false); } @Nullable public static PsiMethod findValueOfMethod(@NotNull final PsiType psiType) { final PsiClass psiClass = PsiUtil.resolveClassInClassTypeOnly(psiType); 
return psiClass != null ? findValueOfMethod(psiClass) : null;
  }

  /**
   * Looks up the public static factory {@code valueOf(String)} / {@code valueOf(Object)} method
   * declared on or inherited by {@code psiClass} that returns the class itself.
   * The result is cached on the class and invalidated on any Java structure change.
   */
  @Nullable
  public static PsiMethod findValueOfMethod(@NotNull final PsiClass psiClass) {
    return CachedValuesManager.getCachedValue(psiClass, () -> {
      final PsiMethod[] candidates = psiClass.findMethodsByName(JavaFxCommonNames.VALUE_OF, true);
      for (PsiMethod candidate : candidates) {
        if (!candidate.hasModifierProperty(PsiModifier.STATIC)) continue;
        final PsiParameter[] parameters = candidate.getParameterList().getParameters();
        if (parameters.length != 1) continue;
        final PsiType parameterType = parameters[0].getType();
        final boolean acceptsStringOrObject = parameterType.equalsToText(CommonClassNames.JAVA_LANG_STRING) ||
                                              parameterType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT);
        // Only a factory whose return type is the class itself qualifies.
        if (acceptsStringOrObject && psiClass.equals(PsiUtil.resolveClassInType(candidate.getReturnType()))) {
          return CachedValueProvider.Result.create(candidate, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
        }
      }
      return CachedValueProvider.Result.create(null, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
    });
  }

  /**
   * An attribute is read-only when no static property setter exists for it and the tag's
   * class descriptor reports it as read-only.
   */
  public static boolean isReadOnly(String attributeName, XmlTag tag) {
    if (findStaticPropertySetter(attributeName, tag) != null) return false;
    final XmlElementDescriptor descriptor = tag.getDescriptor();
    return descriptor instanceof JavaFxClassTagDescriptorBase &&
           ((JavaFxClassTagDescriptorBase)descriptor).isReadOnlyAttribute(attributeName);
  }

  /** A complete expression binding has the exact shape {@code ${...}}. */
  public static boolean isExpressionBinding(@Nullable String value) {
    return value != null && value.startsWith("${") && value.endsWith("}");
  }

  /**
   * Detects malformed {@code $...} expression bindings: a lone {@code $}, an empty {@code ${}},
   * an unbalanced opening/closing brace, a second {@code '{'} after the opener, or trailing
   * text after the closing brace.
   */
  public static boolean isIncorrectExpressionBinding(@Nullable String value) {
    if (value == null || !value.startsWith("$")) return false;
    if (value.length() == 1) return true;
    final boolean hasOpeningBrace = value.startsWith("${");
    final boolean hasClosingBrace = value.endsWith("}");
    if (hasOpeningBrace && hasClosingBrace && value.length() == 3) return true;
    if (hasOpeningBrace != hasClosingBrace) return true;
    if (hasOpeningBrace && value.indexOf('{', 2) >= 2) return true;
    if (hasClosingBrace && value.indexOf('}') <
value.length() - 1) return true; return false; } @Nullable public static PsiType getWritablePropertyType(@Nullable final PsiType type, @NotNull final Project project) { final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(type); final PsiClass psiClass = resolveResult.getElement(); if (psiClass != null) { final PsiClass propertyClass = JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_BEANS_PROPERTY, GlobalSearchScope.allScope(project)); if (propertyClass != null) { final PsiSubstitutor substitutor = TypeConversionUtil.getClassSubstitutor(propertyClass, psiClass, resolveResult.getSubstitutor()); if (substitutor != null) { return substitutor.substitute(propertyClass.getTypeParameters()[0]); } } } return null; } @Nullable private static PsiType getDefaultPropertyExpectedType(@Nullable PsiClass aClass) { if (aClass == null) return null; return CachedValuesManager.getCachedValue(aClass, () -> { final PsiAnnotation annotation = AnnotationUtil.findAnnotationInHierarchy(aClass, Collections.singleton(JavaFxCommonNames.JAVAFX_BEANS_DEFAULT_PROPERTY)); if (annotation != null) { final PsiAnnotationMemberValue memberValue = annotation.findAttributeValue(null); if (memberValue != null) { final String propertyName = StringUtil.unquoteString(memberValue.getText()); final PsiMethod getter = findPropertyGetter(aClass, propertyName); if (getter != null) { final PsiType propertyType = eraseFreeTypeParameters(getter.getReturnType(), getter); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } } } return CachedValueProvider.Result.create(null, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } public static String getDefaultPropertyName(@Nullable PsiClass aClass) { if (aClass == null) { return null; } final PsiAnnotation annotation = AnnotationUtil.findAnnotationInHierarchy(aClass, Collections.singleton( JavaFxCommonNames.JAVAFX_BEANS_DEFAULT_PROPERTY)); 
if (annotation != null) { final PsiAnnotationMemberValue memberValue = annotation.findAttributeValue(null); if (memberValue != null) { return StringUtil.unquoteString(memberValue.getText()); } } return null; } public static boolean isAbleToInstantiate(@NotNull PsiClass psiClass) { return isAbleToInstantiate(psiClass, message -> { }); } public static boolean isAbleToInstantiate(@NotNull PsiClass psiClass, @NotNull Consumer<String> messageConsumer) { if (psiClass.isEnum() || hasNamedArgOrNoArgConstructor(psiClass)) return true; final PsiMethod valueOf = findValueOfMethod(psiClass); if (valueOf == null) { if (!hasBuilder(psiClass)) { messageConsumer.accept("Unable to instantiate"); return false; } } return true; } private static boolean hasNamedArgOrNoArgConstructor(@NotNull PsiClass psiClass) { if (psiClass.getConstructors().length == 0) return true; return CachedValuesManager.getCachedValue(psiClass, () -> { for (PsiMethod constructor : psiClass.getConstructors()) { final PsiParameter[] parameters = constructor.getParameterList().getParameters(); if (parameters.length == 0) { return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } boolean annotated = true; for (PsiParameter parameter : parameters) { if (!AnnotationUtil.isAnnotated(parameter, JavaFxCommonNames.JAVAFX_BEANS_NAMED_ARG, false)) { annotated = false; break; } } if (annotated) return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } return CachedValueProvider.Result.create(false, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } public static boolean hasBuilder(@NotNull final PsiClass psiClass) { return CachedValuesManager.getCachedValue(psiClass, () -> { final Project project = psiClass.getProject(); final PsiClass builderClass = JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_FXML_BUILDER, GlobalSearchScope.allScope(project)); if (builderClass != null) { final 
PsiMethod[] buildMethods = builderClass.findMethodsByName("build", false); if (buildMethods.length == 1 && buildMethods[0].getParameterList().getParametersCount() == 0) { if (ClassInheritorsSearch.search(builderClass).forEach(aClass -> { PsiType returnType = null; final PsiMethod method = MethodSignatureUtil.findMethodBySuperMethod(aClass, buildMethods[0], false); if (method != null) { returnType = method.getReturnType(); } return !Comparing.equal(psiClass, PsiUtil.resolveClassInClassTypeOnly(returnType)); })) { return CachedValueProvider.Result.create(false, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } } } return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } public static boolean isClassAcceptable(@Nullable XmlTag targetTag, @Nullable final PsiClass fromClass) { return isClassAcceptable(targetTag, fromClass, (message, type) -> { }); } public static boolean isClassAcceptable(@Nullable XmlTag targetTag, @Nullable final PsiClass fromClass, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { if (targetTag == null || fromClass == null || !fromClass.isValid()) { return true; } final XmlElementDescriptor tagDescriptor = targetTag.getDescriptor(); if (tagDescriptor instanceof JavaFxPropertyTagDescriptor) { final PsiClass containingClass = ((JavaFxPropertyTagDescriptor)tagDescriptor).getPsiClass(); final PsiType targetType = getWritablePropertyType(containingClass, tagDescriptor.getDeclaration()); return canCoerce(targetType, fromClass, targetTag, messageConsumer); } else if (tagDescriptor instanceof JavaFxClassTagDescriptorBase) { final PsiElement tagDeclaration = tagDescriptor.getDeclaration(); if (tagDeclaration instanceof PsiClass) { PsiClass defaultPropertyOwnerClass = (PsiClass)tagDeclaration; final XmlAttribute factoryAttr = targetTag.getAttribute(FxmlConstants.FX_FACTORY); if (factoryAttr != null) { defaultPropertyOwnerClass = 
getFactoryProducedClass((PsiClass)tagDeclaration, factoryAttr.getValue()); } final PsiType targetType = getDefaultPropertyExpectedType(defaultPropertyOwnerClass); if (targetType != null) { return canCoerce(targetType, fromClass, targetTag, messageConsumer); } if (!isObservableCollection(defaultPropertyOwnerClass)) { return noDefaultPropertyError(messageConsumer); } } } return true; } private static boolean noDefaultPropertyError(@NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { messageConsumer.accept("Parent tag has no default property", Validator.ValidationHost.ErrorType.ERROR); return false; } private static boolean canCoerce(@Nullable PsiType targetType, @NotNull PsiClass fromClass, @NotNull PsiElement context, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { if (targetType == null) return true; PsiType collectionItemType = JavaGenericsUtil.getCollectionItemType(targetType, fromClass.getResolveScope()); if (collectionItemType == null && InheritanceUtil.isInheritor(targetType, JavaFxCommonNames.JAVAFX_BEANS_PROPERTY)) { collectionItemType = getWritablePropertyType(targetType, fromClass.getProject()); } if (collectionItemType != null) { return canCoerceImpl(collectionItemType, fromClass, context, messageConsumer); } return canCoerceImpl(targetType, fromClass, context, messageConsumer); } @Nullable private static PsiType eraseFreeTypeParameters(@Nullable PsiType psiType, @NotNull PsiMember member) { final PsiClass containingClass = member.getContainingClass(); return eraseFreeTypeParameters(psiType, containingClass); } @Nullable private static PsiType eraseFreeTypeParameters(@Nullable PsiType psiType, @Nullable PsiClass containingClass) { if (containingClass == null) return null; return JavaPsiFacade.getElementFactory(containingClass.getProject()).createRawSubstitutor(containingClass).substitute(psiType); } private static boolean canCoerceImpl(@NotNull PsiType targetType, @NotNull PsiClass 
fromClass, @NotNull PsiElement context, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { if (targetType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT) || targetType.equalsToText(CommonClassNames.JAVA_LANG_STRING) || targetType.isAssignableFrom(PsiTypesUtil.getClassType(fromClass))) { return true; } final PsiClassType boxedTargetClass = targetType instanceof PsiPrimitiveType ? ((PsiPrimitiveType)targetType).getBoxedType(context) : null; if (boxedTargetClass != null && InheritanceUtil.isInheritor(boxedTargetClass, CommonClassNames.JAVA_LANG_NUMBER) || InheritanceUtil.isInheritor(targetType, CommonClassNames.JAVA_LANG_NUMBER)) { if (Comparing.strEqual(fromClass.getQualifiedName(), CommonClassNames.JAVA_LANG_STRING) || InheritanceUtil.isInheritor(fromClass, CommonClassNames.JAVA_LANG_NUMBER)) { return true; } return unrelatedTypesWarning(targetType, fromClass, messageConsumer); } final PsiMethod valueOfMethod = findValueOfMethod(targetType); final PsiType valueOfParameterType = valueOfMethod != null && valueOfMethod.getParameterList().getParametersCount() == 1 ? 
valueOfMethod.getParameterList().getParameters()[0].getType() : null; if (valueOfParameterType != null && valueOfParameterType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) { return true; } if (Comparing.strEqual(fromClass.getQualifiedName(), CommonClassNames.JAVA_LANG_STRING)) { if (isPrimitiveOrBoxed(targetType) || valueOfParameterType != null && valueOfParameterType.equalsToText(CommonClassNames.JAVA_LANG_STRING)) { return true; } } if (valueOfMethod != null) { return unrelatedTypesWarning(targetType, fromClass, messageConsumer); } return unableToCoerceError(targetType, fromClass, messageConsumer); } private static boolean unableToCoerceError(@NotNull PsiType targetType, @NotNull PsiClass fromClass, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { messageConsumer.accept("Unable to coerce " + HighlightUtil.formatClass(fromClass) + " to " + targetType.getCanonicalText(), Validator.ValidationHost.ErrorType.ERROR); return false; } private static boolean unrelatedTypesWarning(@NotNull PsiType targetType, @NotNull PsiClass fromClass, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { messageConsumer.accept("Conversion between unrelated types, " + HighlightUtil.formatClass(fromClass) + " to " + targetType.getCanonicalText(), Validator.ValidationHost.ErrorType.WARNING); return true; } public static boolean isOutOfHierarchy(final XmlAttributeValue element) { XmlTag tag = PsiTreeUtil.getParentOfType(element, XmlTag.class); while (tag != null) { if (FxmlConstants.FX_DEFINE.equals(tag.getName())) { return true; } tag = tag.getParentTag(); } return false; } public static PsiType getWrappedPropertyType(final PsiField field, final Project project, final Map<String, PsiType> typeMap) { return CachedValuesManager.getCachedValue(field, () -> { final PsiType fieldType = field.getType(); final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(fieldType); final PsiClass fieldClass = 
resolveResult.getElement(); if (fieldClass == null) { final PsiType propertyType = eraseFreeTypeParameters(fieldType, field); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } PsiType substitute = null; for (String typeName : typeMap.keySet()) { if (InheritanceUtil.isInheritor(fieldType, typeName)) { substitute = typeMap.get(typeName); break; } } if (substitute == null) { if (!InheritanceUtil.isInheritor(fieldType, JavaFxCommonNames.JAVAFX_BEANS_VALUE_OBSERVABLE_VALUE)) { final PsiType propertyType = eraseFreeTypeParameters(fieldType, field); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } final PsiClass aClass = JavaPsiFacade.getInstance(project) .findClass(JavaFxCommonNames.JAVAFX_BEANS_VALUE_OBSERVABLE_VALUE, GlobalSearchScope.allScope(project)); LOG.assertTrue(aClass != null); final PsiSubstitutor substitutor = TypeConversionUtil.getSuperClassSubstitutor(aClass, fieldClass, resolveResult.getSubstitutor()); final PsiMethod[] values = aClass.findMethodsByName(JavaFxCommonNames.GET_VALUE, false); LOG.assertTrue(values.length == 1); substitute = substitutor.substitute(values[0].getReturnType()); } final PsiType propertyType = eraseFreeTypeParameters(substitute, field); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } @Nullable public static PsiType getWritablePropertyType(@Nullable PsiClass containingClass, @Nullable PsiElement declaration) { if (declaration instanceof PsiField) { return getWrappedPropertyType((PsiField)declaration, declaration.getProject(), JavaFxCommonNames.ourWritableMap); } if (declaration instanceof PsiMethod) { final PsiMethod method = (PsiMethod)declaration; if (method.getParameterList().getParametersCount() != 0) { return getSetterArgumentType(method); } final String propertyName = PropertyUtil.getPropertyName(method); final PsiClass 
psiClass = containingClass != null ? containingClass : method.getContainingClass(); if (propertyName != null && containingClass != null) { final PsiMethod setter = findInstancePropertySetter(psiClass, propertyName); if (setter != null) { final PsiType setterArgumentType = getSetterArgumentType(setter); if (setterArgumentType != null) return setterArgumentType; } } return getGetterReturnType(method); } return null; } @Nullable private static PsiType getSetterArgumentType(@NotNull PsiMethod method) { return CachedValuesManager.getCachedValue(method, () -> { final PsiParameter[] parameters = method.getParameterList().getParameters(); final boolean isStatic = method.hasModifierProperty(PsiModifier.STATIC); if (isStatic && parameters.length == 2 || !isStatic && parameters.length == 1) { final PsiType argumentType = eraseFreeTypeParameters(parameters[parameters.length - 1].getType(), method); return CachedValueProvider.Result.create(argumentType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } return CachedValueProvider.Result.create(null, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } private static PsiType getGetterReturnType(@NotNull PsiMethod method) { return CachedValuesManager.getCachedValue(method, () -> { final PsiType returnType = eraseFreeTypeParameters(method.getReturnType(), method); return CachedValueProvider.Result.create(returnType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } @Nullable public static PsiType getReadablePropertyType(@Nullable PsiElement declaration) { if (declaration instanceof PsiField) { return getWrappedPropertyType((PsiField)declaration, declaration.getProject(), JavaFxCommonNames.ourReadOnlyMap); } if (declaration instanceof PsiMethod) { PsiMethod psiMethod = (PsiMethod)declaration; if (psiMethod.getParameterList().getParametersCount() == 0 && !psiMethod.hasModifierProperty(PsiModifier.STATIC)) { return getGetterReturnType(psiMethod); } } return null; } @NotNull public static 
Map<String, XmlAttributeValue> collectFileIds(@Nullable final XmlTag currentTag) { if (currentTag == null) return Collections.emptyMap(); final PsiFile containingFile = currentTag.getContainingFile(); final XmlAttribute currentIdAttribute = currentTag.getAttribute(FxmlConstants.FX_ID); return collectFileIds(containingFile, currentIdAttribute != null ? currentIdAttribute.getValue() : null); } @NotNull public static Map<String, XmlAttributeValue> collectFileIds(@Nullable PsiFile psiFile, @Nullable String skipFxId) { if (!(psiFile instanceof XmlFile)) return Collections.emptyMap(); final XmlTag rootTag = ((XmlFile)psiFile).getRootTag(); if (rootTag == null) return Collections.emptyMap(); final Map<String, XmlAttributeValue> cachedIds = CachedValuesManager .getCachedValue(rootTag, () -> new CachedValueProvider.Result<>(prepareFileIds(rootTag), PsiModificationTracker.MODIFICATION_COUNT)); if (skipFxId != null && cachedIds.containsKey(skipFxId)) { final Map<String, XmlAttributeValue> filteredIds = new THashMap<>(cachedIds); filteredIds.remove(skipFxId); return filteredIds; } return cachedIds; } @NotNull private static Map<String, XmlAttributeValue> prepareFileIds(XmlTag rootTag) { final Map<String, XmlAttributeValue> fileIds = new THashMap<>(); for (XmlTag tag : SyntaxTraverser.psiTraverser().withRoot(rootTag).filter(XmlTag.class)) { final XmlAttribute idAttribute = tag.getAttribute(FxmlConstants.FX_ID); if (idAttribute != null) { final String idValue = idAttribute.getValue(); if (idValue != null) fileIds.put(idValue, idAttribute.getValueElement()); } } final XmlAttribute controllerAttribute = rootTag.getAttribute(FxmlConstants.FX_CONTROLLER); if (controllerAttribute != null) { fileIds.put(FxmlConstants.CONTROLLER, controllerAttribute.getValueElement()); } return fileIds; } @Nullable public static PsiClass getTagClassById(@Nullable XmlAttributeValue xmlAttributeValue, @Nullable String id, @NotNull PsiElement context) { return FxmlConstants.CONTROLLER.equals(id) ? 
getControllerClass(context.getContainingFile()) : getTagClass(xmlAttributeValue); } @Nullable public static PsiClass getWritablePropertyClass(@Nullable XmlAttributeValue xmlAttributeValue) { if (xmlAttributeValue != null) { return getPropertyClass(getWritablePropertyType(xmlAttributeValue), xmlAttributeValue); } return null; } @Nullable public static PsiType getWritablePropertyType(@Nullable XmlAttributeValue xmlAttributeValue) { final PsiClass tagClass = getTagClass(xmlAttributeValue); if (tagClass != null) { final PsiElement declaration = getAttributeDeclaration(xmlAttributeValue); if (declaration != null) { return getWritablePropertyType(tagClass, declaration); } } return null; } @Nullable public static PsiClass getPropertyClass(@Nullable PsiType propertyType, @NotNull PsiElement context) { if (propertyType instanceof PsiPrimitiveType) { PsiClassType boxedType = ((PsiPrimitiveType)propertyType).getBoxedType(context); return boxedType != null ? boxedType.resolve() : null; } return PsiUtil.resolveClassInType(propertyType); } public static boolean hasConversionFromAnyType(@NotNull PsiClass targetClass) { return Comparing.strEqual(targetClass.getQualifiedName(), CommonClassNames.JAVA_LANG_STRING) || findValueOfMethod(targetClass) != null; } @Nullable public static String getBoxedPropertyType(@Nullable PsiClass containingClass, @Nullable PsiMember declaration) { PsiType psiType = getWritablePropertyType(containingClass, declaration); if (psiType instanceof PsiPrimitiveType) { return ((PsiPrimitiveType)psiType).getBoxedTypeName(); } if (PsiPrimitiveType.getUnboxedType(psiType) != null) { final PsiClass psiClass = PsiUtil.resolveClassInType(psiType); if (psiClass != null) { return psiClass.getQualifiedName(); } } return null; } @Contract("null->false") public static boolean isPrimitiveOrBoxed(@Nullable PsiType psiType) { return psiType instanceof PsiPrimitiveType || PsiPrimitiveType.getUnboxedType(psiType) != null; } @NotNull public static Map<String, PsiMember> 
collectReadableProperties(@Nullable PsiClass psiClass) { if (psiClass != null) { return CachedValuesManager.getCachedValue(psiClass, () -> CachedValueProvider.Result.create(prepareReadableProperties(psiClass), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT)); } return Collections.emptyMap(); } @NotNull private static Map<String, PsiMember> prepareReadableProperties(@NotNull PsiClass psiClass) { final Map<String, PsiMember> acceptableMembers = new THashMap<>(); for (PsiMethod method : psiClass.getAllMethods()) { if (method.hasModifierProperty(PsiModifier.STATIC) || !method.hasModifierProperty(PsiModifier.PUBLIC)) continue; if (PropertyUtil.isSimplePropertyGetter(method)) { final String propertyName = PropertyUtil.getPropertyName(method); assert propertyName != null; acceptableMembers.put(propertyName, method); } } return acceptableMembers; } @NotNull public static Map<String, PsiMember> collectWritableProperties(@Nullable PsiClass psiClass) { if (psiClass != null) { return CachedValuesManager.getCachedValue(psiClass, () -> CachedValueProvider.Result.create(prepareWritableProperties(psiClass), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT)); } return Collections.emptyMap(); } @NotNull private static Map<String, PsiMember> prepareWritableProperties(@NotNull PsiClass psiClass) { // todo search for setter in corresponding builder class, e.g. 
MyDataBuilder.setText() + MyData.getText(), reuse logic from hasBuilder() final Map<String, PsiMember> acceptableMembers = new THashMap<>(); for (PsiMethod constructor : psiClass.getConstructors()) { if (!constructor.hasModifierProperty(PsiModifier.PUBLIC)) continue; final PsiParameter[] parameters = constructor.getParameterList().getParameters(); for (PsiParameter parameter : parameters) { String propertyName = getPropertyNameFromNamedArgAnnotation(parameter); if (propertyName != null && !acceptableMembers.containsKey(propertyName)) { final PsiField field = psiClass.findFieldByName(propertyName, true); if (field != null && !field.hasModifierProperty(PsiModifier.STATIC)) { acceptableMembers.put(propertyName, field); } } } } for (PsiMethod method : psiClass.getAllMethods()) { if (method.hasModifierProperty(PsiModifier.STATIC) || !method.hasModifierProperty(PsiModifier.PUBLIC)) continue; if (PropertyUtil.isSimplePropertyGetter(method)) { PsiMember acceptableMember = method; final String propertyName = PropertyUtil.getPropertyName(method); assert propertyName != null; PsiMethod setter = findInstancePropertySetter(psiClass, propertyName); if (setter != null) { final PsiType setterArgType = setter.getParameterList().getParameters()[0].getType(); final PsiField field = psiClass.findFieldByName(propertyName, true); if (field != null && !field.hasModifierProperty(PsiModifier.STATIC)) { final PsiType fieldType = getWritablePropertyType(psiClass, field); if (fieldType == null || setterArgType.isConvertibleFrom(fieldType)) { acceptableMember = field; } } } else { final PsiType returnType = method.getReturnType(); if (returnType != null && isWritablePropertyType(psiClass, returnType)) { final PsiField field = psiClass.findFieldByName(propertyName, true); if (field != null && !field.hasModifierProperty(PsiModifier.STATIC)) { final PsiType fieldType = getWritablePropertyType(psiClass, field); if (fieldType == null || returnType.isAssignableFrom(fieldType)) { acceptableMember = 
field; } } } else { acceptableMember = null; } } if (acceptableMember != null) acceptableMembers.put(propertyName, acceptableMember); } } return acceptableMembers; } @Nullable private static String getPropertyNameFromNamedArgAnnotation(@NotNull PsiParameter parameter) { final PsiAnnotation annotation = AnnotationUtil.findAnnotation(parameter, JavaFxCommonNames.JAVAFX_BEANS_NAMED_ARG); if (annotation != null) { final PsiAnnotationMemberValue psiValue = annotation.findAttributeValue(JavaFxCommonNames.VALUE); if (psiValue instanceof PsiLiteralExpression) { final Object value = ((PsiLiteralExpression)psiValue).getValue(); if (value instanceof String) { return (String)value; } } } return null; } @Nullable public static PsiMethod findInstancePropertySetter(@NotNull PsiClass psiClass, @Nullable String propertyName) { if (StringUtil.isEmpty(propertyName)) return null; final String suggestedSetterName = PropertyUtil.suggestSetterName(propertyName); final PsiMethod[] setters = psiClass.findMethodsByName(suggestedSetterName, true); for (PsiMethod setter : setters) { if (setter.hasModifierProperty(PsiModifier.PUBLIC) && !setter.hasModifierProperty(PsiModifier.STATIC) && PropertyUtil.isSimplePropertySetter(setter)) { return setter; } } return null; } private static boolean isWritablePropertyType(@NotNull PsiClass psiClass, @NotNull PsiType fieldType) { return isObservableCollection(PsiUtil.resolveClassInType(fieldType)) && JavaGenericsUtil.getCollectionItemType(fieldType, psiClass.getResolveScope()) != null || InheritanceUtil.isInheritor(fieldType, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_MAP); } public static boolean isObservableCollection(@Nullable PsiClass psiClass) { return psiClass != null && (InheritanceUtil.isInheritor(psiClass, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_LIST) || InheritanceUtil.isInheritor(psiClass, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_SET) || InheritanceUtil.isInheritor(psiClass, 
JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_ARRAY)); } @Nullable private static PsiSubstitutor getTagClassSubstitutor(@NotNull XmlAttribute xmlAttribute, @NotNull PsiClass controllerClass) { final XmlTag xmlTag = xmlAttribute.getParent(); final PsiClass tagClass = getTagClass(xmlTag); if (tagClass != null) { final String tagFieldName = xmlTag.getAttributeValue(FxmlConstants.FX_ID); if (!StringUtil.isEmpty(tagFieldName)) { final PsiField tagField = controllerClass.findFieldByName(tagFieldName, true); if (tagField != null && !tagField.hasModifierProperty(PsiModifier.STATIC) && isVisibleInFxml(tagField)) { final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(tagField.getType()); final PsiClass resolvedClass = resolveResult.getElement(); if (resolvedClass != null) { return TypeConversionUtil.getClassSubstitutor(tagClass, resolvedClass, resolveResult.getSubstitutor()); } } } } return null; } @Nullable public static PsiClassType getDeclaredEventType(@NotNull XmlAttribute xmlAttribute) { final PsiClass tagClass = getTagClass(xmlAttribute.getParent()); if (tagClass != null) { final PsiType eventHandlerPropertyType = getEventHandlerPropertyType(tagClass, xmlAttribute.getName()); if (eventHandlerPropertyType != null) { final PsiClass controllerClass = getControllerClass(xmlAttribute.getContainingFile()); if (controllerClass != null) { final PsiSubstitutor tagClassSubstitutor = getTagClassSubstitutor(xmlAttribute, controllerClass); final PsiType handlerType = tagClassSubstitutor != null ? tagClassSubstitutor.substitute(eventHandlerPropertyType) : eventHandlerPropertyType; final PsiClassType eventType = substituteEventType(handlerType, xmlAttribute.getProject()); final PsiType erasedType = eraseFreeTypeParameters(eventType, tagClass); return erasedType instanceof PsiClassType ? 
(PsiClassType)erasedType : null; } } } return null; } @Nullable private static PsiType getEventHandlerPropertyType(@NotNull PsiClass tagClass, @NotNull String eventName) { final PsiMethod[] handlerSetterCandidates = tagClass.findMethodsByName(PropertyUtil.suggestSetterName(eventName), true); for (PsiMethod handlerSetter : handlerSetterCandidates) { if (!handlerSetter.hasModifierProperty(PsiModifier.STATIC) && handlerSetter.hasModifierProperty(PsiModifier.PUBLIC)) { final PsiType propertyType = PropertyUtil.getPropertyType(handlerSetter); if (InheritanceUtil.isInheritor(propertyType, JavaFxCommonNames.JAVAFX_EVENT_EVENT_HANDLER)) { return propertyType; } } } final PsiField handlerField = tagClass.findFieldByName(eventName, true); final PsiClassType propertyType = getPropertyClassType(handlerField); if (InheritanceUtil.isInheritor(propertyType, JavaFxCommonNames.JAVAFX_EVENT_EVENT_HANDLER)) { return propertyType; } return null; } @Nullable private static PsiClassType substituteEventType(@Nullable PsiType eventHandlerType, @NotNull Project project) { if (!(eventHandlerType instanceof PsiClassType)) return null; final PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)eventHandlerType).resolveGenerics(); final PsiClass eventHandlerClass = resolveResult.getElement(); if (eventHandlerClass == null) return null; final PsiSubstitutor eventHandlerClassSubstitutor = resolveResult.getSubstitutor(); final PsiClass eventHandlerInterface = JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_EVENT_EVENT_HANDLER, GlobalSearchScope.allScope(project)); if (eventHandlerInterface == null) return null; if (!InheritanceUtil.isInheritorOrSelf(eventHandlerClass, eventHandlerInterface, true)) return null; final PsiTypeParameter[] typeParameters = eventHandlerInterface.getTypeParameters(); if (typeParameters.length != 1) return null; final PsiTypeParameter eventTypeParameter = typeParameters[0]; final PsiSubstitutor substitutor = 
TypeConversionUtil.getSuperClassSubstitutor(eventHandlerInterface, eventHandlerClass, eventHandlerClassSubstitutor); final PsiType eventType = substitutor.substitute(eventTypeParameter); if (eventType instanceof PsiClassType) { return (PsiClassType)eventType; } if (eventType instanceof PsiWildcardType) { // TODO Handle wildcards more accurately final PsiType boundType = ((PsiWildcardType)eventType).getBound(); if (boundType instanceof PsiClassType) { return (PsiClassType)boundType; } } return null; } @Nullable private static PsiClass getFactoryProducedClass(@Nullable PsiClass psiClass, @Nullable String factoryMethodName) { if (psiClass == null || factoryMethodName == null) return null; final PsiMethod[] methods = psiClass.findMethodsByName(factoryMethodName, true); for (PsiMethod method : methods) { if (method.getParameterList().getParametersCount() == 0 && method.hasModifierProperty(PsiModifier.STATIC)) { return PsiUtil.resolveClassInClassTypeOnly(method.getReturnType()); } } return null; } @Nullable public static String validateEnumConstant(@NotNull PsiClass enumClass, @NonNls @Nullable String name) { if (!enumClass.isEnum() || name == null) return null; final Set<String> constantNames = CachedValuesManager.getCachedValue(enumClass, () -> CachedValueProvider.Result.create(Arrays.stream(enumClass.getFields()) .filter(PsiEnumConstant.class::isInstance) .map(PsiField::getName) .map(String::toUpperCase) .collect(Collectors.toCollection(THashSet::new)), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT)); if (!constantNames.contains(name.toUpperCase())) { return "No enum constant '" + name + "' in " + enumClass.getQualifiedName(); } return null; } @NotNull public static String getPropertyName(@NotNull String memberName, boolean isMethod) { if (!isMethod) return memberName; final String propertyName = PropertyUtil.getPropertyName(memberName); return propertyName != null ? 
propertyName : memberName; } @Nullable public static PsiClass getTagValueClass(@NotNull XmlTag xmlTag) { return getTagValueClass(xmlTag, getTagClass(xmlTag)).getFirst(); } @NotNull public static Pair<PsiClass, Boolean> getTagValueClass(@NotNull XmlTag xmlTag, @Nullable PsiClass tagClass) { if (tagClass != null) { final XmlAttribute constAttr = xmlTag.getAttribute(FxmlConstants.FX_CONSTANT); if (constAttr != null) { final PsiField constField = tagClass.findFieldByName(constAttr.getValue(), true); if (constField != null) { final PsiType constType = constField.getType(); return Pair.create(PsiUtil.resolveClassInClassTypeOnly( constType instanceof PsiPrimitiveType ? ((PsiPrimitiveType)constType).getBoxedType(xmlTag) : constType), true); } } else { final XmlAttribute factoryAttr = xmlTag.getAttribute(FxmlConstants.FX_FACTORY); if (factoryAttr != null) { return Pair.create(getFactoryProducedClass(tagClass, factoryAttr.getValue()), true); } } } return Pair.create(tagClass, false); } public static boolean isControllerClass(@NotNull PsiClass psiClass) { final Project project = psiClass.getProject(); final GlobalSearchScope resolveScope = psiClass.getResolveScope(); if (isControllerClassName(project, psiClass.getQualifiedName(), resolveScope)) { return true; } final Ref<Boolean> refFound = new Ref<>(false); ClassInheritorsSearch.search(psiClass, resolveScope, true, true, false).forEach((aClass) -> { if (isControllerClassName(project, aClass.getQualifiedName(), resolveScope)) { refFound.set(true); return false; } return true; }); return refFound.get(); } private static boolean isControllerClassName(@NotNull Project project, @Nullable String qualifiedName, @NotNull GlobalSearchScope resolveScope) { return qualifiedName != null && !JavaFxControllerClassIndex.findFxmlWithController(project, qualifiedName, resolveScope).isEmpty(); } @Nullable private static CachedValueProvider.Result<PsiClass> computeInjectedControllerClass(PsiFile containingFile) { return 
ourGuard.doPreventingRecursion(containingFile, true, () -> { final Project project = containingFile.getProject(); final Ref<PsiClass> injectedController = new Ref<>(); final Ref<PsiFile> dep = new Ref<>(); final PsiClass fxmlLoader = JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_FXML_FXMLLOADER, GlobalSearchScope.allScope(project)); if (fxmlLoader != null) { final PsiMethod[] injectControllerMethods = fxmlLoader.findMethodsByName("setController", false); if (injectControllerMethods.length == 1) { final JavaFxRetrieveControllerProcessor processor = new JavaFxRetrieveControllerProcessor() { @Override protected boolean isResolveToSetter(PsiMethodCallExpression methodCallExpression) { return methodCallExpression.resolveMethod() == injectControllerMethods[0]; } }; final GlobalSearchScope globalSearchScope = GlobalSearchScope .getScopeRestrictedByFileTypes(containingFile.getResolveScope(), StdFileTypes.JAVA); ReferencesSearch.search(containingFile, globalSearchScope).forEach(reference -> { final PsiElement element = reference.getElement(); if (element instanceof PsiLiteralExpression) { final PsiNewExpression expression = PsiTreeUtil.getParentOfType(element, PsiNewExpression.class); if (expression != null) { final PsiType type = expression.getType(); if (type != null && type.equalsToText(JavaFxCommonNames.JAVAFX_FXML_FXMLLOADER)) { final PsiElement parent = expression.getParent(); if (parent instanceof PsiLocalVariable) { ReferencesSearch.search(parent).forEach(processor); final PsiClass controller = processor.getInjectedController(); if (controller != null) { injectedController.set(controller); dep.set(processor.getContainingFile()); return false; } } } } } return true; }); } } return new CachedValueProvider.Result<>(injectedController.get(), dep.get() != null ? 
dep.get() : PsiModificationTracker.MODIFICATION_COUNT); }); }

  /**
   * Reference processor that scans usages of an FXMLLoader variable and captures the controller
   * class passed to a setController(...) call; stops processing as soon as one is found.
   * Subclasses decide (via isResolveToSetter) whether a given call is the relevant setter.
   */
  private static abstract class JavaFxRetrieveControllerProcessor implements Processor<PsiReference> {
    // Captured controller class and the file containing the setController(...) call, if any.
    private final Ref<PsiClass> myInjectedController = new Ref<>();
    private final Ref<PsiFile> myContainingFile = new Ref<>();

    // Returns true when the given call actually resolves to the expected setter method.
    protected abstract boolean isResolveToSetter(PsiMethodCallExpression methodCallExpression);

    @Override
    public boolean process(PsiReference reference) {
      final PsiElement element = reference.getElement();
      if (element instanceof PsiReferenceExpression) {
        final PsiMethodCallExpression methodCallExpression = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class);
        if (methodCallExpression != null && isResolveToSetter(methodCallExpression)) {
          final PsiExpression[] expressions = methodCallExpression.getArgumentList().getExpressions();
          if (expressions.length > 0) {
            // The first argument's type is the controller class being injected.
            final PsiClass psiClass = PsiUtil.resolveClassInType(expressions[0].getType());
            if (psiClass != null) {
              myInjectedController.set(psiClass);
              myContainingFile.set(methodCallExpression.getContainingFile());
              return false; // found - stop the search
            }
          }
        }
      }
      return true; // keep looking
    }

    private PsiClass getInjectedController() { return myInjectedController.get(); }

    private PsiFile getContainingFile() { return myContainingFile.get(); }
  }
}
plugins/javaFX/src/org/jetbrains/plugins/javaFX/fxml/JavaFxPsiUtil.java
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.javaFX.fxml; import com.intellij.codeInsight.AnnotationUtil; import com.intellij.codeInsight.daemon.Validator; import com.intellij.codeInsight.daemon.impl.analysis.HighlightUtil; import com.intellij.codeInsight.daemon.impl.analysis.JavaGenericsUtil; import com.intellij.lang.ASTNode; import com.intellij.lang.xml.XMLLanguage; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.*; import com.intellij.psi.impl.source.PostprocessReformattingAspect; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.searches.ClassInheritorsSearch; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.*; import com.intellij.psi.xml.*; import com.intellij.util.Processor; import com.intellij.xml.XmlAttributeDescriptor; import com.intellij.xml.XmlElementDescriptor; import gnu.trove.THashMap; import gnu.trove.THashSet; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.javaFX.fxml.descriptors.JavaFxClassTagDescriptorBase; import 
org.jetbrains.plugins.javaFX.fxml.descriptors.JavaFxPropertyTagDescriptor; import org.jetbrains.plugins.javaFX.indexing.JavaFxControllerClassIndex; import java.util.*; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.stream.Collectors; /** * User: anna */ public class JavaFxPsiUtil { private static final Logger LOG = Logger.getInstance("#" + JavaFxPsiUtil.class.getName()); public static XmlProcessingInstruction createSingleImportInstruction(String qualifiedName, Project project) { final String importText = "<?import " + qualifiedName + "?>"; final PsiElement child = PsiFileFactory.getInstance(project).createFileFromText("a.fxml", XMLLanguage.INSTANCE, importText).getFirstChild(); return PsiTreeUtil.findChildOfType(child, XmlProcessingInstruction.class); } public static List<String> parseImports(XmlFile file) { return parseInstructions(file, "import"); } public static List<String> parseInjectedLanguages(XmlFile file) { return parseInstructions(file, "language"); } private static List<String> parseInstructions(XmlFile file, String instructionName) { final List<String> definedImports = new ArrayList<>(); final XmlDocument document = file.getDocument(); if (document != null) { final XmlProlog prolog = document.getProlog(); final Collection<XmlProcessingInstruction> instructions = new ArrayList<>(PsiTreeUtil.findChildrenOfType(prolog, XmlProcessingInstruction.class)); for (final XmlProcessingInstruction instruction : instructions) { final String instructionTarget = getInstructionTarget(instructionName, instruction); if (instructionTarget != null) { definedImports.add(instructionTarget); } } } return definedImports; } @Nullable public static String getInstructionTarget(String instructionName, XmlProcessingInstruction instruction) { final ASTNode node = instruction.getNode(); ASTNode xmlNameNode = node.findChildByType(XmlTokenType.XML_NAME); ASTNode importNode = node.findChildByType(XmlTokenType.XML_TAG_CHARACTERS); if 
(!(xmlNameNode == null || !instructionName.equals(xmlNameNode.getText()) || importNode == null)) { return importNode.getText(); } return null; } public static PsiClass findPsiClass(String name, PsiElement context) { final Project project = context.getProject(); if (!StringUtil.getShortName(name).equals(name)) { final PsiClass psiClass = JavaPsiFacade.getInstance(project).findClass(name, GlobalSearchScope.allScope(project)); if (psiClass != null) { return psiClass; } return findNestedPsiClass(name, context, project); } return findPsiClass(name, parseImports((XmlFile)context.getContainingFile()), context, project); } private static PsiClass findNestedPsiClass(String name, PsiElement context, Project project) { final int dotIndex = name.indexOf('.'); if (dotIndex > 0) { final String outerName = name.substring(0, dotIndex); final PsiClass outerClass = findPsiClass(outerName, parseImports((XmlFile)context.getContainingFile()), context, project); if (outerClass != null) { final List<String> nameChain = StringUtil.split(name, ".", true, false); final List<String> nestedNames = nameChain.subList(1, nameChain.size()); PsiClass aClass = outerClass; for (String nestedName : nestedNames) { aClass = aClass.findInnerClassByName(nestedName, true); if (aClass == null) return null; } return aClass; } } return null; } private static PsiClass findPsiClass(String name, List<String> imports, PsiElement context, Project project) { PsiClass psiClass = null; if (imports != null) { JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project); PsiFile file = context.getContainingFile(); for (String anImport : imports) { if (StringUtil.getShortName(anImport).equals(name)) { psiClass = psiFacade.findClass(anImport, file.getResolveScope()); } else if (StringUtil.endsWith(anImport, ".*")) { psiClass = psiFacade.findClass(StringUtil.trimEnd(anImport, "*") + name, file.getResolveScope()); } if (psiClass != null) { return psiClass; } } } return null; } public static void 
insertImportWhenNeeded(XmlFile xmlFile, String shortName, String qualifiedName) { if (shortName != null && qualifiedName != null && findPsiClass(shortName, xmlFile.getRootTag()) == null) { final XmlDocument document = xmlFile.getDocument(); if (document != null) { final XmlProcessingInstruction processingInstruction = createSingleImportInstruction(qualifiedName, xmlFile.getProject()); final XmlProlog prolog = document.getProlog(); if (prolog != null) { prolog.add(processingInstruction); } else { document.addBefore(processingInstruction, document.getRootTag()); } PostprocessReformattingAspect.getInstance(xmlFile.getProject()).doPostponedFormatting(xmlFile.getViewProvider()); } } } public static PsiClass getPropertyClass(PsiElement member) { final PsiClassType classType = getPropertyClassType(member); return classType != null ? classType.resolve() : null; } public static PsiClassType getPropertyClassType(PsiElement member) { return getPropertyClassType(member, JavaFxCommonNames.JAVAFX_BEANS_PROPERTY_OBJECT_PROPERTY); } public static PsiClassType getPropertyClassType(PsiElement member, final String superTypeFQN) { if (member instanceof PsiMember) { final PsiType type = PropertyUtil.getPropertyType((PsiMember)member); if (type instanceof PsiClassType) { final PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)type).resolveGenerics(); final PsiClass attributeClass = resolveResult.getElement(); if (attributeClass != null) { final PsiClass objectProperty = JavaPsiFacade.getInstance(attributeClass.getProject()) .findClass(superTypeFQN, attributeClass.getResolveScope()); if (objectProperty != null) { final PsiSubstitutor superClassSubstitutor = TypeConversionUtil .getClassSubstitutor(objectProperty, attributeClass, resolveResult.getSubstitutor()); if (superClassSubstitutor != null) { final PsiType propertyType = superClassSubstitutor.substitute(objectProperty.getTypeParameters()[0]); if (propertyType instanceof PsiClassType) { return (PsiClassType)propertyType; 
      }
    }
    else {
      return (PsiClassType)type;
    }
  }
}
}
}
}
return null;
}

  /**
   * Finds the public static setter of a static (attached) property, e.g. {@code GridPane.rowIndex}.
   * {@code attributeName} is expected to be qualified with the owner class short name.
   */
  public static PsiMethod findStaticPropertySetter(String attributeName, XmlTag context) {
    final String packageName = StringUtil.getPackageName(attributeName);
    if (context != null && !StringUtil.isEmptyOrSpaces(packageName)) {
      final PsiClass classWithStaticProperty = findPsiClass(packageName, context);
      if (classWithStaticProperty != null) {
        return findStaticPropertySetter(attributeName, classWithStaticProperty);
      }
    }
    return null;
  }

  /**
   * Finds the public static setter for the given attached property on the given class.
   * Static setters take two parameters (target node, value), hence the parameter-count check.
   */
  @Nullable
  public static PsiMethod findStaticPropertySetter(@NotNull String attributeName, @Nullable PsiClass classWithStaticProperty) {
    if (classWithStaticProperty == null) return null;
    final String setterName = PropertyUtil.suggestSetterName(StringUtil.getShortName(attributeName));
    final PsiMethod[] setters = classWithStaticProperty.findMethodsByName(setterName, true);
    for (PsiMethod setter : setters) {
      if (setter.hasModifierProperty(PsiModifier.PUBLIC) &&
          setter.hasModifierProperty(PsiModifier.STATIC) &&
          setter.getParameterList().getParametersCount() == 2) {
        return setter;
      }
    }
    return null;
  }

  /**
   * Finds a public instance getter for the property: first the plain {@code get...} form,
   * then falls back to the boolean {@code is...} form.
   */
  public static PsiMethod findPropertyGetter(@NotNull PsiClass psiClass, @Nullable String propertyName) {
    if (StringUtil.isEmpty(propertyName)) return null;
    PsiMethod getter = findPropertyGetter(psiClass, propertyName, null);
    if (getter != null) {
      return getter;
    }
    return findPropertyGetter(psiClass, propertyName, PsiType.BOOLEAN);
  }

  // Looks up a public non-static simple getter whose name is derived from the property name and type.
  private static PsiMethod findPropertyGetter(final PsiClass psiClass, final String propertyName, final PsiType propertyType) {
    final String getterName = PropertyUtil.suggestGetterName(propertyName, propertyType);
    final PsiMethod[] getters = psiClass.findMethodsByName(getterName, true);
    for (PsiMethod getter : getters) {
      if (getter.hasModifierProperty(PsiModifier.PUBLIC) &&
          !getter.hasModifierProperty(PsiModifier.STATIC) &&
          PropertyUtil.isSimplePropertyGetter(getter)) {
        return getter;
      }
    }
    return null;
  }

  /**
   * Finds the JavaFX observable property accessor {@code <name>Property()}:
   * public, non-static, no-arg, returning an {@code ObservableValue} subtype.
   */
  public static PsiMethod findObservablePropertyGetter(@NotNull PsiClass psiClass, @Nullable String propertyName) {
    if (StringUtil.isEmpty(propertyName)) return null;
    final PsiMethod[] getters = psiClass.findMethodsByName(propertyName + JavaFxCommonNames.PROPERTY_METHOD_SUFFIX, true);
    for (PsiMethod getter : getters) {
      if (getter.hasModifierProperty(PsiModifier.PUBLIC) &&
          !getter.hasModifierProperty(PsiModifier.STATIC) &&
          getter.getParameterList().getParametersCount() == 0 &&
          InheritanceUtil.isInheritor(getter.getReturnType(), JavaFxCommonNames.JAVAFX_BEANS_VALUE_OBSERVABLE_VALUE)) {
        return getter;
      }
    }
    return null;
  }

  private static final Key<CachedValue<PsiClass>> INJECTED_CONTROLLER = Key.create("javafx.injected.controller");
  private static final RecursionGuard ourGuard = RecursionManager.createGuard("javafx.controller");

  /**
   * Resolves the controller class of an fxml file. Tries, in order: the fx:controller attribute
   * of the root tag, a cached "injected" controller (guarded against recursive resolution),
   * and the "type" attribute of an fx:root tag.
   */
  public static PsiClass getControllerClass(final PsiFile containingFile) {
    if (containingFile instanceof XmlFile) {
      final XmlTag rootTag = ((XmlFile)containingFile).getRootTag();
      final Project project = containingFile.getProject();
      if (rootTag != null) {
        XmlAttribute attribute = rootTag.getAttribute(FxmlConstants.FX_CONTROLLER);
        if (attribute != null) {
          final PsiClass controllerClass = findControllerClass(containingFile, project, attribute);
          if (controllerClass != null) {
            return controllerClass;
          }
        }
      }
      final CachedValuesManager manager = CachedValuesManager.getManager(containingFile.getProject());
      // Recursion guard: resolving the injected controller may itself consult this fxml file.
      final PsiClass injectedControllerClass = ourGuard.doPreventingRecursion(
        containingFile, true,
        () -> manager.getCachedValue(containingFile, INJECTED_CONTROLLER,
                                     new JavaFxControllerCachedValueProvider(containingFile.getProject(), containingFile), true));
      if (injectedControllerClass != null) {
        return injectedControllerClass;
      }
      if (rootTag != null && FxmlConstants.FX_ROOT.equals(rootTag.getName())) {
        final XmlAttribute rootTypeAttr = rootTag.getAttribute(FxmlConstants.TYPE);
        if (rootTypeAttr != null) {
          return findControllerClass(containingFile, project, rootTypeAttr);
        }
      }
    }
    return null;
  }

  // Resolves the class named by the attribute value, restricted to the project scope
  // intersected with the file's resolve scope.
  private static PsiClass findControllerClass(PsiFile containingFile, Project project, XmlAttribute attribute) {
    final String attributeValue = attribute.getValue();
    if (!StringUtil.isEmptyOrSpaces(attributeValue)) {
      final GlobalSearchScope customScope = GlobalSearchScope.projectScope(project).intersectWith(containingFile.getResolveScope());
      return JavaPsiFacade.getInstance(project).findClass(attributeValue, customScope);
    }
    return null;
  }

  /** Returns true if the attribute corresponds to an event-handler property of the tag's class. */
  public static boolean isEventHandlerProperty(@NotNull XmlAttribute attribute) {
    final PsiClass tagClass = getTagClass(attribute.getParent());
    return tagClass != null && getEventHandlerPropertyType(tagClass, attribute.getName()) != null;
  }

  /** Returns the class backing the tag that owns the given attribute value, if any. */
  @Nullable
  public static PsiClass getTagClass(@Nullable XmlAttributeValue xmlAttributeValue) {
    if (xmlAttributeValue != null) {
      final PsiElement parent = xmlAttributeValue.getParent();
      if (parent instanceof XmlAttribute) {
        final XmlTag xmlTag = ((XmlAttribute)parent).getParent();
        return getTagClass(xmlTag);
      }
    }
    return null;
  }

  /** Returns the class declared by the tag's descriptor, if the declaration is a class. */
  @Nullable
  public static PsiClass getTagClass(@Nullable XmlTag xmlTag) {
    if (xmlTag != null) {
      final XmlElementDescriptor descriptor = xmlTag.getDescriptor();
      if (descriptor != null) {
        final PsiElement declaration = descriptor.getDeclaration();
        if (declaration instanceof PsiClass) {
          return (PsiClass)declaration;
        }
      }
    }
    return null;
  }

  /** Returns the PSI declaration behind the attribute owning the given attribute value, if any. */
  @Nullable
  public static PsiElement getAttributeDeclaration(@Nullable XmlAttributeValue xmlAttributeValue) {
    if (xmlAttributeValue != null) {
      final PsiElement parent = xmlAttributeValue.getParent();
      if (parent instanceof XmlAttribute) {
        final XmlAttributeDescriptor descriptor = ((XmlAttribute)parent).getDescriptor();
        if (descriptor != null) {
          return descriptor.getDeclaration();
        }
      }
    }
    return null;
  }

  /** A member is visible to the FXML loader if it is public or annotated with @FXML. */
  public static boolean isVisibleInFxml(@NotNull PsiMember psiMember) {
    return psiMember.hasModifierProperty(PsiModifier.PUBLIC) ||
           AnnotationUtil.isAnnotated(psiMember, JavaFxCommonNames.JAVAFX_FXML_ANNOTATION, false);
  }

  @Nullable
  public static PsiMethod findValueOfMethod(@NotNull
final PsiType psiType) {
    final PsiClass psiClass = PsiUtil.resolveClassInClassTypeOnly(psiType);
    return psiClass != null ? findValueOfMethod(psiClass) : null;
  }

  /**
   * Finds a static {@code valueOf(String|Object)} factory method that returns the class itself
   * (used by the FXML loader to coerce text to the target type). Cached per class.
   */
  @Nullable
  public static PsiMethod findValueOfMethod(@NotNull final PsiClass psiClass) {
    return CachedValuesManager.getCachedValue(psiClass, () -> {
      final PsiMethod[] methods = psiClass.findMethodsByName(JavaFxCommonNames.VALUE_OF, true);
      for (PsiMethod method : methods) {
        if (method.hasModifierProperty(PsiModifier.STATIC)) {
          final PsiParameter[] parameters = method.getParameterList().getParameters();
          if (parameters.length == 1) {
            final PsiType type = parameters[0].getType();
            if (type.equalsToText(CommonClassNames.JAVA_LANG_STRING) || type.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) {
              if (psiClass.equals(PsiUtil.resolveClassInType(method.getReturnType()))) {
                return CachedValueProvider.Result.create(method, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
              }
            }
          }
        }
      }
      return CachedValueProvider.Result.create(null, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
    });
  }

  /** An attribute is read-only when there is no static setter and the tag descriptor reports it read-only. */
  public static boolean isReadOnly(String attributeName, XmlTag tag) {
    if (findStaticPropertySetter(attributeName, tag) != null) return false;
    final XmlElementDescriptor descriptor = tag.getDescriptor();
    if (descriptor instanceof JavaFxClassTagDescriptorBase) {
      return ((JavaFxClassTagDescriptorBase)descriptor).isReadOnlyAttribute(attributeName);
    }
    return false;
  }

  /** True for a well-formed expression binding of the form {@code ${...}}. */
  public static boolean isExpressionBinding(@Nullable String value) {
    return value != null && value.startsWith("${") && value.endsWith("}");
  }

  /**
   * Detects malformed expression bindings: a bare "$", an empty "${}", unbalanced braces,
   * or stray braces inside/after the expression.
   */
  public static boolean isIncorrectExpressionBinding(@Nullable String value) {
    if (value == null || !value.startsWith("$")) return false;
    if (value.length() == 1) return true;
    final boolean expressionStarts = value.startsWith("${");
    final boolean expressionEnds = value.endsWith("}");
    if (expressionStarts && expressionEnds && value.length() == 3) return true;
    if (expressionStarts != expressionEnds) return true;
    if (expressionStarts && value.indexOf('{', 2) >= 2) return true;
    if (expressionEnds && value.indexOf('}') < value.length() - 1) return true;
    return false;
  }

  /** For a {@code javafx.beans.property.Property<T>} type, returns the substituted value type {@code T}. */
  @Nullable
  public static PsiType getWritablePropertyType(@Nullable final PsiType type, @NotNull final Project project) {
    final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(type);
    final PsiClass psiClass = resolveResult.getElement();
    if (psiClass != null) {
      final PsiClass propertyClass = JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_BEANS_PROPERTY,
                                                                                  GlobalSearchScope.allScope(project));
      if (propertyClass != null) {
        final PsiSubstitutor substitutor =
          TypeConversionUtil.getClassSubstitutor(propertyClass, psiClass, resolveResult.getSubstitutor());
        if (substitutor != null) {
          return substitutor.substitute(propertyClass.getTypeParameters()[0]);
        }
      }
    }
    return null;
  }

  /**
   * Returns the (erased) type of the class's default property, declared via @DefaultProperty
   * somewhere in the hierarchy. Cached per class.
   */
  @Nullable
  private static PsiType getDefaultPropertyExpectedType(@Nullable PsiClass aClass) {
    if (aClass == null) return null;
    return CachedValuesManager.getCachedValue(aClass, () -> {
      final PsiAnnotation annotation =
        AnnotationUtil.findAnnotationInHierarchy(aClass, Collections.singleton(JavaFxCommonNames.JAVAFX_BEANS_DEFAULT_PROPERTY));
      if (annotation != null) {
        final PsiAnnotationMemberValue memberValue = annotation.findAttributeValue(null);
        if (memberValue != null) {
          final String propertyName = StringUtil.unquoteString(memberValue.getText());
          final PsiMethod getter = findPropertyGetter(aClass, propertyName);
          if (getter != null) {
            final PsiType propertyType = eraseFreeTypeParameters(getter.getReturnType(), getter);
            return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
          }
        }
      }
      return CachedValueProvider.Result.create(null, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
    });
  }

  /** Returns the name of the class's default property from @DefaultProperty, or null. */
  public static String getDefaultPropertyName(@Nullable PsiClass aClass) {
    if (aClass == null) {
      return null;
    }
    final PsiAnnotation annotation = AnnotationUtil.findAnnotationInHierarchy(aClass, Collections.singleton(
      JavaFxCommonNames.JAVAFX_BEANS_DEFAULT_PROPERTY));
    if (annotation != null) {
      final PsiAnnotationMemberValue memberValue = annotation.findAttributeValue(null);
      if (memberValue != null) {
        return StringUtil.unquoteString(memberValue.getText());
      }
    }
    return null;
  }

  public static boolean isAbleToInstantiate(@NotNull PsiClass psiClass) {
    return isAbleToInstantiate(psiClass, message -> {
    });
  }

  /**
   * Checks whether the FXML loader could instantiate the class: an enum, a no-arg or
   * all-@NamedArg constructor, a valueOf() factory, or a Builder. Reports a message otherwise.
   */
  public static boolean isAbleToInstantiate(@NotNull PsiClass psiClass, @NotNull Consumer<String> messageConsumer) {
    if (psiClass.isEnum() || hasNamedArgOrNoArgConstructor(psiClass)) return true;
    final PsiMethod valueOf = findValueOfMethod(psiClass);
    if (valueOf == null) {
      if (!hasBuilder(psiClass)) {
        messageConsumer.accept("Unable to instantiate");
        return false;
      }
    }
    return true;
  }

  // True when the class has no constructors at all, a no-arg constructor, or a constructor
  // whose every parameter is annotated with @NamedArg. Cached per class.
  private static boolean hasNamedArgOrNoArgConstructor(@NotNull PsiClass psiClass) {
    if (psiClass.getConstructors().length == 0) return true;
    return CachedValuesManager.getCachedValue(psiClass, () -> {
      for (PsiMethod constructor : psiClass.getConstructors()) {
        final PsiParameter[] parameters = constructor.getParameterList().getParameters();
        if (parameters.length == 0) {
          return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
        }
        boolean annotated = true;
        for (PsiParameter parameter : parameters) {
          if (!AnnotationUtil.isAnnotated(parameter, JavaFxCommonNames.JAVAFX_BEANS_NAMED_ARG, false)) {
            annotated = false;
            break;
          }
        }
        if (annotated) return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
      }
      return CachedValueProvider.Result.create(false, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
    });
  }

  /**
   * True when a javafx.util.Builder implementation exists whose build() produces this class.
   * Cached per class.
   */
  public static boolean hasBuilder(@NotNull final PsiClass psiClass) {
    return CachedValuesManager.getCachedValue(psiClass, () -> {
      final Project project = psiClass.getProject();
      final PsiClass builderClass =
JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_FXML_BUILDER, GlobalSearchScope.allScope(project));
      if (builderClass != null) {
        final PsiMethod[] buildMethods = builderClass.findMethodsByName("build", false);
        if (buildMethods.length == 1 && buildMethods[0].getParameterList().getParametersCount() == 0) {
          // Search all Builder implementations; the lambda returns false (stopping the search)
          // as soon as one whose build() returns this class is found.
          if (ClassInheritorsSearch.search(builderClass).forEach(aClass -> {
            PsiType returnType = null;
            final PsiMethod method = MethodSignatureUtil.findMethodBySuperMethod(aClass, buildMethods[0], false);
            if (method != null) {
              returnType = method.getReturnType();
            }
            return !Comparing.equal(psiClass, PsiUtil.resolveClassInClassTypeOnly(returnType));
          })) {
            // forEach ran to completion: no matching builder exists.
            return CachedValueProvider.Result.create(false, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
          }
        }
      }
      return CachedValueProvider.Result.create(true, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT);
    });
  }

  public static boolean isClassAcceptable(@Nullable XmlTag targetTag, @Nullable final PsiClass fromClass) {
    return isClassAcceptable(targetTag, fromClass, (message, type) -> {
    });
  }

  /**
   * Checks whether a value of {@code fromClass} may be placed under {@code targetTag}:
   * either coercible to the property type of a property tag, or to the default property
   * (or observable collection) of a class tag. Problems are reported to the consumer.
   */
  public static boolean isClassAcceptable(@Nullable XmlTag targetTag, @Nullable final PsiClass fromClass,
                                          @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) {
    if (targetTag == null || fromClass == null || !fromClass.isValid()) {
      return true;
    }
    final XmlElementDescriptor tagDescriptor = targetTag.getDescriptor();
    if (tagDescriptor instanceof JavaFxPropertyTagDescriptor) {
      final PsiClass containingClass = ((JavaFxPropertyTagDescriptor)tagDescriptor).getPsiClass();
      final PsiType targetType = getWritablePropertyType(containingClass, tagDescriptor.getDeclaration());
      return canCoerce(targetType, fromClass, targetTag, messageConsumer);
    }
    else if (tagDescriptor instanceof JavaFxClassTagDescriptorBase) {
      final PsiElement tagDeclaration = tagDescriptor.getDeclaration();
      if (tagDeclaration instanceof PsiClass) {
        PsiClass defaultPropertyOwnerClass = (PsiClass)tagDeclaration;
        // fx:factory may produce a different class than the tag's own.
        final XmlAttribute factoryAttr = targetTag.getAttribute(FxmlConstants.FX_FACTORY);
        if (factoryAttr != null) {
          defaultPropertyOwnerClass = getFactoryProducedClass((PsiClass)tagDeclaration, factoryAttr.getValue());
        }
        final PsiType targetType = getDefaultPropertyExpectedType(defaultPropertyOwnerClass);
        if (targetType != null) {
          return canCoerce(targetType, fromClass, targetTag, messageConsumer);
        }
        if (!isObservableCollection(defaultPropertyOwnerClass)) {
          return noDefaultPropertyError(messageConsumer);
        }
      }
    }
    return true;
  }

  private static boolean noDefaultPropertyError(@NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) {
    messageConsumer.accept("Parent tag has no default property",
                           Validator.ValidationHost.ErrorType.ERROR);
    return false;
  }

  // When the target is a collection or an observable Property, coerce against the item/value type.
  private static boolean canCoerce(@Nullable PsiType targetType, @NotNull PsiClass fromClass, @NotNull PsiElement context,
                                   @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) {
    if (targetType == null) return true;
    PsiType collectionItemType = JavaGenericsUtil.getCollectionItemType(targetType, fromClass.getResolveScope());
    if (collectionItemType == null && InheritanceUtil.isInheritor(targetType, JavaFxCommonNames.JAVAFX_BEANS_PROPERTY)) {
      collectionItemType = getWritablePropertyType(targetType, fromClass.getProject());
    }
    if (collectionItemType != null) {
      return canCoerceImpl(collectionItemType, fromClass, context, messageConsumer);
    }
    return canCoerceImpl(targetType, fromClass, context, messageConsumer);
  }

  @Nullable
  private static PsiType eraseFreeTypeParameters(@Nullable PsiType psiType, @NotNull PsiMember member) {
    final PsiClass containingClass = member.getContainingClass();
    return eraseFreeTypeParameters(psiType, containingClass);
  }

  // Erases type parameters that are free in the containing class (raw substitution).
  @Nullable
  private static PsiType eraseFreeTypeParameters(@Nullable PsiType psiType, @Nullable PsiClass containingClass) {
    if (containingClass == null) return null;
    return JavaPsiFacade.getElementFactory(containingClass.getProject()).createRawSubstitutor(containingClass).substitute(psiType);
  }

  /**
   * Core coercion check between a source class and a target type, mirroring the FXML loader's
   * coercion rules (Object/String targets, numeric conversions, valueOf() factories).
   */
  private static boolean canCoerceImpl(@NotNull PsiType targetType, @NotNull PsiClass fromClass, @NotNull PsiElement context,
                                       @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) {
    if (targetType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT) ||
        targetType.equalsToText(CommonClassNames.JAVA_LANG_STRING) ||
        targetType.isAssignableFrom(PsiTypesUtil.getClassType(fromClass))) {
      return true;
    }
    final PsiClassType boxedTargetClass =
      targetType instanceof PsiPrimitiveType ? ((PsiPrimitiveType)targetType).getBoxedType(context) : null;
    if (boxedTargetClass != null && InheritanceUtil.isInheritor(boxedTargetClass, CommonClassNames.JAVA_LANG_NUMBER) ||
        InheritanceUtil.isInheritor(targetType, CommonClassNames.JAVA_LANG_NUMBER)) {
      if (Comparing.strEqual(fromClass.getQualifiedName(), CommonClassNames.JAVA_LANG_STRING) ||
          InheritanceUtil.isInheritor(fromClass, CommonClassNames.JAVA_LANG_NUMBER)) {
        return true;
      }
      return unrelatedTypesWarning(targetType, fromClass, messageConsumer);
    }
    final PsiMethod valueOfMethod = findValueOfMethod(targetType);
    final PsiType valueOfParameterType =
      valueOfMethod != null && valueOfMethod.getParameterList().getParametersCount() == 1 ?
valueOfMethod.getParameterList().getParameters()[0].getType() : null; if (valueOfParameterType != null && valueOfParameterType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) { return true; } if (Comparing.strEqual(fromClass.getQualifiedName(), CommonClassNames.JAVA_LANG_STRING)) { if (isPrimitiveOrBoxed(targetType) || valueOfParameterType != null && valueOfParameterType.equalsToText(CommonClassNames.JAVA_LANG_STRING)) { return true; } } if (valueOfMethod != null) { return unrelatedTypesWarning(targetType, fromClass, messageConsumer); } return unableToCoerceError(targetType, fromClass, messageConsumer); } private static boolean unableToCoerceError(@NotNull PsiType targetType, @NotNull PsiClass fromClass, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { messageConsumer.accept("Unable to coerce " + HighlightUtil.formatClass(fromClass) + " to " + targetType.getCanonicalText(), Validator.ValidationHost.ErrorType.ERROR); return false; } private static boolean unrelatedTypesWarning(@NotNull PsiType targetType, @NotNull PsiClass fromClass, @NotNull BiConsumer<String, Validator.ValidationHost.ErrorType> messageConsumer) { messageConsumer.accept("Conversion between unrelated types, " + HighlightUtil.formatClass(fromClass) + " to " + targetType.getCanonicalText(), Validator.ValidationHost.ErrorType.WARNING); return true; } public static boolean isOutOfHierarchy(final XmlAttributeValue element) { XmlTag tag = PsiTreeUtil.getParentOfType(element, XmlTag.class); while (tag != null) { if (FxmlConstants.FX_DEFINE.equals(tag.getName())) { return true; } tag = tag.getParentTag(); } return false; } public static PsiType getWrappedPropertyType(final PsiField field, final Project project, final Map<String, PsiType> typeMap) { return CachedValuesManager.getCachedValue(field, () -> { final PsiType fieldType = field.getType(); final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(fieldType); final PsiClass fieldClass = 
resolveResult.getElement(); if (fieldClass == null) { final PsiType propertyType = eraseFreeTypeParameters(fieldType, field); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } PsiType substitute = null; for (String typeName : typeMap.keySet()) { if (InheritanceUtil.isInheritor(fieldType, typeName)) { substitute = typeMap.get(typeName); break; } } if (substitute == null) { if (!InheritanceUtil.isInheritor(fieldType, JavaFxCommonNames.JAVAFX_BEANS_VALUE_OBSERVABLE_VALUE)) { final PsiType propertyType = eraseFreeTypeParameters(fieldType, field); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } final PsiClass aClass = JavaPsiFacade.getInstance(project) .findClass(JavaFxCommonNames.JAVAFX_BEANS_VALUE_OBSERVABLE_VALUE, GlobalSearchScope.allScope(project)); LOG.assertTrue(aClass != null); final PsiSubstitutor substitutor = TypeConversionUtil.getSuperClassSubstitutor(aClass, fieldClass, resolveResult.getSubstitutor()); final PsiMethod[] values = aClass.findMethodsByName(JavaFxCommonNames.GET_VALUE, false); LOG.assertTrue(values.length == 1); substitute = substitutor.substitute(values[0].getReturnType()); } final PsiType propertyType = eraseFreeTypeParameters(substitute, field); return CachedValueProvider.Result.create(propertyType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } @Nullable public static PsiType getWritablePropertyType(@Nullable PsiClass containingClass, @Nullable PsiElement declaration) { if (declaration instanceof PsiField) { return getWrappedPropertyType((PsiField)declaration, declaration.getProject(), JavaFxCommonNames.ourWritableMap); } if (declaration instanceof PsiMethod) { final PsiMethod method = (PsiMethod)declaration; if (method.getParameterList().getParametersCount() != 0) { return getSetterArgumentType(method); } final String propertyName = PropertyUtil.getPropertyName(method); final PsiClass 
psiClass = containingClass != null ? containingClass : method.getContainingClass(); if (propertyName != null && containingClass != null) { final PsiMethod setter = findInstancePropertySetter(psiClass, propertyName); if (setter != null) { final PsiType setterArgumentType = getSetterArgumentType(setter); if (setterArgumentType != null) return setterArgumentType; } } return getGetterReturnType(method); } return null; } @Nullable private static PsiType getSetterArgumentType(@NotNull PsiMethod method) { return CachedValuesManager.getCachedValue(method, () -> { final PsiParameter[] parameters = method.getParameterList().getParameters(); final boolean isStatic = method.hasModifierProperty(PsiModifier.STATIC); if (isStatic && parameters.length == 2 || !isStatic && parameters.length == 1) { final PsiType argumentType = eraseFreeTypeParameters(parameters[parameters.length - 1].getType(), method); return CachedValueProvider.Result.create(argumentType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); } return CachedValueProvider.Result.create(null, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } private static PsiType getGetterReturnType(@NotNull PsiMethod method) { return CachedValuesManager.getCachedValue(method, () -> { final PsiType returnType = eraseFreeTypeParameters(method.getReturnType(), method); return CachedValueProvider.Result.create(returnType, PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); } @Nullable public static PsiType getReadablePropertyType(@Nullable PsiElement declaration) { if (declaration instanceof PsiField) { return getWrappedPropertyType((PsiField)declaration, declaration.getProject(), JavaFxCommonNames.ourReadOnlyMap); } if (declaration instanceof PsiMethod) { PsiMethod psiMethod = (PsiMethod)declaration; if (psiMethod.getParameterList().getParametersCount() == 0 && !psiMethod.hasModifierProperty(PsiModifier.STATIC)) { return getGetterReturnType(psiMethod); } } return null; } @NotNull public static 
Map<String, XmlAttributeValue> collectFileIds(@Nullable final XmlTag currentTag) {
    if (currentTag == null) return Collections.emptyMap();
    final PsiFile containingFile = currentTag.getContainingFile();
    final XmlAttribute currentIdAttribute = currentTag.getAttribute(FxmlConstants.FX_ID);
    return collectFileIds(containingFile, currentIdAttribute != null ? currentIdAttribute.getValue() : null);
  }

  /**
   * Collects all fx:id values in the file, mapped to their attribute value elements.
   * The id given by {@code skipFxId} (the current tag's own id) is excluded.
   * Results are cached on the root tag.
   */
  @NotNull
  public static Map<String, XmlAttributeValue> collectFileIds(@Nullable PsiFile psiFile, @Nullable String skipFxId) {
    if (!(psiFile instanceof XmlFile)) return Collections.emptyMap();
    final XmlTag rootTag = ((XmlFile)psiFile).getRootTag();
    if (rootTag == null) return Collections.emptyMap();
    final Map<String, XmlAttributeValue> cachedIds = CachedValuesManager
      .getCachedValue(rootTag, () -> new CachedValueProvider.Result<>(prepareFileIds(rootTag), PsiModificationTracker.MODIFICATION_COUNT));
    if (skipFxId != null && cachedIds.containsKey(skipFxId)) {
      final Map<String, XmlAttributeValue> filteredIds = new THashMap<>(cachedIds);
      filteredIds.remove(skipFxId);
      return filteredIds;
    }
    return cachedIds;
  }

  // Walks the whole tag tree collecting fx:id attributes; also registers the implicit "controller" id.
  @NotNull
  private static Map<String, XmlAttributeValue> prepareFileIds(XmlTag rootTag) {
    final Map<String, XmlAttributeValue> fileIds = new THashMap<>();
    for (XmlTag tag : SyntaxTraverser.psiTraverser().withRoot(rootTag).filter(XmlTag.class)) {
      final XmlAttribute idAttribute = tag.getAttribute(FxmlConstants.FX_ID);
      if (idAttribute != null) {
        final String idValue = idAttribute.getValue();
        if (idValue != null) fileIds.put(idValue, idAttribute.getValueElement());
      }
    }
    final XmlAttribute controllerAttribute = rootTag.getAttribute(FxmlConstants.FX_CONTROLLER);
    if (controllerAttribute != null) {
      fileIds.put(FxmlConstants.CONTROLLER, controllerAttribute.getValueElement());
    }
    return fileIds;
  }

  /** Resolves the class behind an fx:id reference; the special "controller" id maps to the controller class. */
  @Nullable
  public static PsiClass getTagClassById(@Nullable XmlAttributeValue xmlAttributeValue, @Nullable String id, @NotNull PsiElement context) {
    return FxmlConstants.CONTROLLER.equals(id) ? getControllerClass(context.getContainingFile()) : getTagClass(xmlAttributeValue);
  }

  /** Class of the writable property addressed by the attribute value, boxed if primitive. */
  @Nullable
  public static PsiClass getWritablePropertyClass(@Nullable XmlAttributeValue xmlAttributeValue) {
    if (xmlAttributeValue != null) {
      return getPropertyClass(getWritablePropertyType(xmlAttributeValue), xmlAttributeValue);
    }
    return null;
  }

  /** Writable type of the property addressed by the attribute value, via the tag's class. */
  @Nullable
  public static PsiType getWritablePropertyType(@Nullable XmlAttributeValue xmlAttributeValue) {
    final PsiClass tagClass = getTagClass(xmlAttributeValue);
    if (tagClass != null) {
      final PsiElement declaration = getAttributeDeclaration(xmlAttributeValue);
      if (declaration != null) {
        return getWritablePropertyType(tagClass, declaration);
      }
    }
    return null;
  }

  /** Resolves the type to its class, boxing primitives against the given context element. */
  @Nullable
  public static PsiClass getPropertyClass(@Nullable PsiType propertyType, @NotNull PsiElement context) {
    if (propertyType instanceof PsiPrimitiveType) {
      PsiClassType boxedType = ((PsiPrimitiveType)propertyType).getBoxedType(context);
      return boxedType != null ? boxedType.resolve() : null;
    }
    return PsiUtil.resolveClassInType(propertyType);
  }

  /** True if any value can be converted to the class: it is String, or it provides valueOf(). */
  public static boolean hasConversionFromAnyType(@NotNull PsiClass targetClass) {
    return Comparing.strEqual(targetClass.getQualifiedName(), CommonClassNames.JAVA_LANG_STRING)
           || findValueOfMethod(targetClass) != null;
  }

  /** Qualified name of the boxed counterpart of the property's type (for primitive and boxed types). */
  @Nullable
  public static String getBoxedPropertyType(@Nullable PsiClass containingClass, @Nullable PsiMember declaration) {
    PsiType psiType = getWritablePropertyType(containingClass, declaration);
    if (psiType instanceof PsiPrimitiveType) {
      return ((PsiPrimitiveType)psiType).getBoxedTypeName();
    }
    if (PsiPrimitiveType.getUnboxedType(psiType) != null) {
      final PsiClass psiClass = PsiUtil.resolveClassInType(psiType);
      if (psiClass != null) {
        return psiClass.getQualifiedName();
      }
    }
    return null;
  }

  @Contract("null->false")
  public static boolean isPrimitiveOrBoxed(@Nullable PsiType psiType) {
    return psiType instanceof PsiPrimitiveType || PsiPrimitiveType.getUnboxedType(psiType) != null;
  }

  /** Readable properties of the class keyed by property name (simple public instance getters). Cached. */
  @NotNull
  public static Map<String, PsiMember> collectReadableProperties(@Nullable PsiClass psiClass) {
    if (psiClass != null) {
      return CachedValuesManager.getCachedValue(psiClass, () ->
        CachedValueProvider.Result.create(prepareReadableProperties(psiClass), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT));
    }
    return Collections.emptyMap();
  }

  @NotNull
  private static Map<String, PsiMember> prepareReadableProperties(@NotNull PsiClass psiClass) {
    final Map<String, PsiMember> acceptableMembers = new THashMap<>();
    for (PsiMethod method : psiClass.getAllMethods()) {
      if (method.hasModifierProperty(PsiModifier.STATIC) || !method.hasModifierProperty(PsiModifier.PUBLIC)) continue;
      if (PropertyUtil.isSimplePropertyGetter(method)) {
        final String propertyName = PropertyUtil.getPropertyName(method);
        assert propertyName != null;
        acceptableMembers.put(propertyName, method);
      }
    }
    return acceptableMembers;
  }

  /** Writable properties of the class keyed by property name. Cached per class. */
  @NotNull
  public static Map<String, PsiMember> collectWritableProperties(@Nullable PsiClass psiClass) {
    if (psiClass != null) {
      return CachedValuesManager.getCachedValue(psiClass, () ->
        CachedValueProvider.Result.create(prepareWritableProperties(psiClass), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT));
    }
    return Collections.emptyMap();
  }

  @NotNull
  private static Map<String, PsiMember> prepareWritableProperties(@NotNull PsiClass psiClass) {
    // todo search for setter in corresponding builder class, e.g.
    // MyDataBuilder.setText() + MyData.getText(), reuse logic from hasBuilder()
    final Map<String, PsiMember> acceptableMembers = new THashMap<>();
    // Fields backing @NamedArg constructor parameters are writable through the constructor.
    for (PsiMethod constructor : psiClass.getConstructors()) {
      if (!constructor.hasModifierProperty(PsiModifier.PUBLIC)) continue;
      final PsiParameter[] parameters = constructor.getParameterList().getParameters();
      for (PsiParameter parameter : parameters) {
        String propertyName = getPropertyNameFromNamedArgAnnotation(parameter);
        if (propertyName != null && !acceptableMembers.containsKey(propertyName)) {
          final PsiField field = psiClass.findFieldByName(propertyName, true);
          if (field != null && !field.hasModifierProperty(PsiModifier.STATIC)) {
            acceptableMembers.put(propertyName, field);
          }
        }
      }
    }
    for (PsiMethod method : psiClass.getAllMethods()) {
      if (method.hasModifierProperty(PsiModifier.STATIC) || !method.hasModifierProperty(PsiModifier.PUBLIC)) continue;
      if (PropertyUtil.isSimplePropertyGetter(method)) {
        PsiMember acceptableMember = method;
        final String propertyName = PropertyUtil.getPropertyName(method);
        assert propertyName != null;
        PsiMethod setter = findInstancePropertySetter(psiClass, propertyName);
        if (setter != null) {
          // Prefer the backing field when its type is compatible with the setter argument.
          final PsiType setterArgType = setter.getParameterList().getParameters()[0].getType();
          final PsiField field = psiClass.findFieldByName(propertyName, true);
          if (field != null && !field.hasModifierProperty(PsiModifier.STATIC)) {
            final PsiType fieldType = getWritablePropertyType(psiClass, field);
            if (fieldType == null || setterArgType.isConvertibleFrom(fieldType)) {
              acceptableMember = field;
            }
          }
        }
        else {
          // No setter: a getter returning an observable collection/map is still writable in FXML.
          final PsiType returnType = method.getReturnType();
          if (returnType != null && isWritablePropertyType(psiClass, returnType)) {
            final PsiField field = psiClass.findFieldByName(propertyName, true);
            if (field != null && !field.hasModifierProperty(PsiModifier.STATIC)) {
              final PsiType fieldType = getWritablePropertyType(psiClass, field);
              if (fieldType == null || returnType.isAssignableFrom(fieldType)) {
                acceptableMember = field;
              }
            }
          }
          else {
            acceptableMember = null;
          }
        }
        if (acceptableMember != null) acceptableMembers.put(propertyName, acceptableMember);
      }
    }
    return acceptableMembers;
  }

  // Extracts the property name from a parameter's @NamedArg("name") annotation, if present.
  @Nullable
  private static String getPropertyNameFromNamedArgAnnotation(@NotNull PsiParameter parameter) {
    final PsiAnnotation annotation = AnnotationUtil.findAnnotation(parameter, JavaFxCommonNames.JAVAFX_BEANS_NAMED_ARG);
    if (annotation != null) {
      final PsiAnnotationMemberValue psiValue = annotation.findAttributeValue(JavaFxCommonNames.VALUE);
      if (psiValue instanceof PsiLiteralExpression) {
        final Object value = ((PsiLiteralExpression)psiValue).getValue();
        if (value instanceof String) {
          return (String)value;
        }
      }
    }
    return null;
  }

  /** Finds a public non-static simple setter for the property in the class hierarchy. */
  @Nullable
  public static PsiMethod findInstancePropertySetter(@NotNull PsiClass psiClass, @Nullable String propertyName) {
    if (StringUtil.isEmpty(propertyName)) return null;
    final String suggestedSetterName = PropertyUtil.suggestSetterName(propertyName);
    final PsiMethod[] setters = psiClass.findMethodsByName(suggestedSetterName, true);
    for (PsiMethod setter : setters) {
      if (setter.hasModifierProperty(PsiModifier.PUBLIC) &&
          !setter.hasModifierProperty(PsiModifier.STATIC) &&
          PropertyUtil.isSimplePropertySetter(setter)) {
        return setter;
      }
    }
    return null;
  }

  // A type is writable without a setter when it is an observable collection with a known
  // item type, or an observable map.
  private static boolean isWritablePropertyType(@NotNull PsiClass psiClass, @NotNull PsiType fieldType) {
    return isObservableCollection(PsiUtil.resolveClassInType(fieldType)) &&
           JavaGenericsUtil.getCollectionItemType(fieldType, psiClass.getResolveScope()) != null ||
           InheritanceUtil.isInheritor(fieldType, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_MAP);
  }

  public static boolean isObservableCollection(@Nullable PsiClass psiClass) {
    return psiClass != null &&
           (InheritanceUtil.isInheritor(psiClass, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_LIST) ||
            InheritanceUtil.isInheritor(psiClass, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_SET) ||
            InheritanceUtil.isInheritor(psiClass, JavaFxCommonNames.JAVAFX_COLLECTIONS_OBSERVABLE_ARRAY));
  }

  // If the tag has fx:id bound to a controller field, computes the substitutor mapping the tag
  // class's type parameters to the field's declared type arguments.
  @Nullable
  private static PsiSubstitutor getTagClassSubstitutor(@NotNull XmlAttribute xmlAttribute, @NotNull PsiClass controllerClass) {
    final XmlTag xmlTag = xmlAttribute.getParent();
    final PsiClass tagClass = getTagClass(xmlTag);
    if (tagClass != null) {
      final String tagFieldName = xmlTag.getAttributeValue(FxmlConstants.FX_ID);
      if (!StringUtil.isEmpty(tagFieldName)) {
        final PsiField tagField = controllerClass.findFieldByName(tagFieldName, true);
        if (tagField != null && !tagField.hasModifierProperty(PsiModifier.STATIC) && isVisibleInFxml(tagField)) {
          final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(tagField.getType());
          final PsiClass resolvedClass = resolveResult.getElement();
          if (resolvedClass != null) {
            return TypeConversionUtil.getClassSubstitutor(tagClass, resolvedClass, resolveResult.getSubstitutor());
          }
        }
      }
    }
    return null;
  }

  /**
   * Computes the declared event type of an event-handler attribute, substituting type
   * parameters from the controller field bound to the tag and erasing free parameters.
   */
  @Nullable
  public static PsiClassType getDeclaredEventType(@NotNull XmlAttribute xmlAttribute) {
    final PsiClass tagClass = getTagClass(xmlAttribute.getParent());
    if (tagClass != null) {
      final PsiType eventHandlerPropertyType = getEventHandlerPropertyType(tagClass, xmlAttribute.getName());
      if (eventHandlerPropertyType != null) {
        final PsiClass controllerClass = getControllerClass(xmlAttribute.getContainingFile());
        if (controllerClass != null) {
          final PsiSubstitutor tagClassSubstitutor = getTagClassSubstitutor(xmlAttribute, controllerClass);
          final PsiType handlerType =
            tagClassSubstitutor != null ? tagClassSubstitutor.substitute(eventHandlerPropertyType) : eventHandlerPropertyType;
          final PsiClassType eventType = substituteEventType(handlerType, xmlAttribute.getProject());
          final PsiType erasedType = eraseFreeTypeParameters(eventType, tagClass);
          return erasedType instanceof PsiClassType ?
(PsiClassType)erasedType : null;
        }
      }
    }
    return null;
  }

  /**
   * Finds the type of the event-handler property: the argument type of a public instance
   * setter for the event name, or the declared type of a same-named field, provided it is
   * an EventHandler subtype.
   */
  @Nullable
  private static PsiType getEventHandlerPropertyType(@NotNull PsiClass tagClass, @NotNull String eventName) {
    final PsiMethod[] handlerSetterCandidates = tagClass.findMethodsByName(PropertyUtil.suggestSetterName(eventName), true);
    for (PsiMethod handlerSetter : handlerSetterCandidates) {
      if (!handlerSetter.hasModifierProperty(PsiModifier.STATIC) &&
          handlerSetter.hasModifierProperty(PsiModifier.PUBLIC)) {
        final PsiType propertyType = PropertyUtil.getPropertyType(handlerSetter);
        if (InheritanceUtil.isInheritor(propertyType, JavaFxCommonNames.JAVAFX_EVENT_EVENT_HANDLER)) {
          return propertyType;
        }
      }
    }
    final PsiField handlerField = tagClass.findFieldByName(eventName, true);
    final PsiClassType propertyType = getPropertyClassType(handlerField);
    if (InheritanceUtil.isInheritor(propertyType, JavaFxCommonNames.JAVAFX_EVENT_EVENT_HANDLER)) {
      return propertyType;
    }
    return null;
  }

  /**
   * Extracts the concrete event type E from an {@code EventHandler<E>} implementation type,
   * unwrapping a wildcard bound when present.
   */
  @Nullable
  private static PsiClassType substituteEventType(@Nullable PsiType eventHandlerType, @NotNull Project project) {
    if (!(eventHandlerType instanceof PsiClassType)) return null;
    final PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)eventHandlerType).resolveGenerics();
    final PsiClass eventHandlerClass = resolveResult.getElement();
    if (eventHandlerClass == null) return null;
    final PsiSubstitutor eventHandlerClassSubstitutor = resolveResult.getSubstitutor();
    final PsiClass eventHandlerInterface =
      JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_EVENT_EVENT_HANDLER, GlobalSearchScope.allScope(project));
    if (eventHandlerInterface == null) return null;
    if (!InheritanceUtil.isInheritorOrSelf(eventHandlerClass, eventHandlerInterface, true)) return null;
    final PsiTypeParameter[] typeParameters = eventHandlerInterface.getTypeParameters();
    if (typeParameters.length != 1) return null;
    final PsiTypeParameter eventTypeParameter = typeParameters[0];
    final PsiSubstitutor substitutor =
      TypeConversionUtil.getSuperClassSubstitutor(eventHandlerInterface, eventHandlerClass, eventHandlerClassSubstitutor);
    final PsiType eventType = substitutor.substitute(eventTypeParameter);
    if (eventType instanceof PsiClassType) {
      return (PsiClassType)eventType;
    }
    if (eventType instanceof PsiWildcardType) { // TODO Handle wildcards more accurately
      final PsiType boundType = ((PsiWildcardType)eventType).getBound();
      if (boundType instanceof PsiClassType) {
        return (PsiClassType)boundType;
      }
    }
    return null;
  }

  // Resolves the class produced by a no-arg static fx:factory method on the tag class.
  @Nullable
  private static PsiClass getFactoryProducedClass(@Nullable PsiClass psiClass, @Nullable String factoryMethodName) {
    if (psiClass == null || factoryMethodName == null) return null;
    final PsiMethod[] methods = psiClass.findMethodsByName(factoryMethodName, true);
    for (PsiMethod method : methods) {
      if (method.getParameterList().getParametersCount() == 0 &&
          method.hasModifierProperty(PsiModifier.STATIC)) {
        return PsiUtil.resolveClassInClassTypeOnly(method.getReturnType());
      }
    }
    return null;
  }

  /**
   * Validates a case-insensitive enum-constant reference; returns an error message or null.
   * NOTE(review): uses default-locale toUpperCase(), which is locale-sensitive
   * (e.g. Turkish dotted/dotless i) — consider locale-independent casing; verify before changing.
   */
  @Nullable
  public static String validateEnumConstant(@NotNull PsiClass enumClass, @NonNls @Nullable String name) {
    if (!enumClass.isEnum() || name == null) return null;
    final Set<String> constantNames = CachedValuesManager.getCachedValue(enumClass, () ->
      CachedValueProvider.Result.create(Arrays.stream(enumClass.getFields())
                                          .filter(PsiEnumConstant.class::isInstance)
                                          .map(PsiField::getName)
                                          .map(String::toUpperCase)
                                          .collect(Collectors.toCollection(THashSet::new)),
                                        PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT));
    if (!constantNames.contains(name.toUpperCase())) {
      return "No enum constant '" + name + "' in " + enumClass.getQualifiedName();
    }
    return null;
  }

  /** Property name for a member: for methods, strips the accessor prefix; otherwise the name itself. */
  @NotNull
  public static String getPropertyName(@NotNull String memberName, boolean isMethod) {
    if (!isMethod) return memberName;
    final String propertyName = PropertyUtil.getPropertyName(memberName);
    return propertyName != null ? propertyName : memberName;
  }

  @Nullable
  public static PsiClass getTagValueClass(@NotNull XmlTag xmlTag) {
    return getTagValueClass(xmlTag, getTagClass(xmlTag)).getFirst();
  }

  /**
   * Returns the class of the value the tag yields, together with a flag telling whether it was
   * produced indirectly (via fx:constant or fx:factory) rather than by instantiating the tag class.
   */
  @NotNull
  public static Pair<PsiClass, Boolean> getTagValueClass(@NotNull XmlTag xmlTag, @Nullable PsiClass tagClass) {
    if (tagClass != null) {
      final XmlAttribute constAttr = xmlTag.getAttribute(FxmlConstants.FX_CONSTANT);
      if (constAttr != null) {
        final PsiField constField = tagClass.findFieldByName(constAttr.getValue(), true);
        if (constField != null) {
          final PsiType constType = constField.getType();
          return Pair.create(PsiUtil.resolveClassInClassTypeOnly(
            constType instanceof PsiPrimitiveType ? ((PsiPrimitiveType)constType).getBoxedType(xmlTag) : constType), true);
        }
      }
      else {
        final XmlAttribute factoryAttr = xmlTag.getAttribute(FxmlConstants.FX_FACTORY);
        if (factoryAttr != null) {
          return Pair.create(getFactoryProducedClass(tagClass, factoryAttr.getValue()), true);
        }
      }
    }
    return Pair.create(tagClass, false);
  }

  /** True if the class (or an inheritor) is referenced as fx:controller from some fxml file. */
  public static boolean isControllerClass(@NotNull PsiClass psiClass) {
    final Project project = psiClass.getProject();
    final GlobalSearchScope resolveScope = psiClass.getResolveScope();
    if (isControllerClassName(project, psiClass.getQualifiedName(), resolveScope)) {
      return true;
    }
    final Ref<Boolean> refFound = new Ref<>(false);
    // Returning false from the processor stops the inheritor search early.
    ClassInheritorsSearch.search(psiClass, resolveScope, true, true, false).forEach((aClass) -> {
      if (isControllerClassName(project, aClass.getQualifiedName(), resolveScope)) {
        refFound.set(true);
        return false;
      }
      return true;
    });
    return refFound.get();
  }

  private static boolean isControllerClassName(@NotNull Project project, @Nullable String qualifiedName, @NotNull GlobalSearchScope resolveScope) {
    return qualifiedName != null && !JavaFxControllerClassIndex.findFxmlWithController(project, qualifiedName, resolveScope).isEmpty();
  }

  private static class JavaFxControllerCachedValueProvider implements CachedValueProvider<PsiClass> {
    private final Project myProject;
    private final
PsiFile myContainingFile; public JavaFxControllerCachedValueProvider(Project project, PsiFile containingFile) { myProject = project; myContainingFile = containingFile; } @Nullable @Override public Result<PsiClass> compute() { final Ref<PsiClass> injectedController = new Ref<>(); final Ref<PsiFile> dep = new Ref<>(); final PsiClass fxmlLoader = JavaPsiFacade.getInstance(myProject).findClass(JavaFxCommonNames.JAVAFX_FXML_FXMLLOADER, GlobalSearchScope.allScope(myProject)); if (fxmlLoader != null) { final PsiMethod[] injectControllerMethods = fxmlLoader.findMethodsByName("setController", false); if (injectControllerMethods.length == 1) { final JavaFxRetrieveControllerProcessor processor = new JavaFxRetrieveControllerProcessor() { @Override protected boolean isResolveToSetter(PsiMethodCallExpression methodCallExpression) { return methodCallExpression.resolveMethod() == injectControllerMethods[0]; } }; final GlobalSearchScope globalSearchScope = GlobalSearchScope .notScope(GlobalSearchScope.getScopeRestrictedByFileTypes(myContainingFile.getResolveScope(), StdFileTypes.XML)); ReferencesSearch.search(myContainingFile, globalSearchScope).forEach(reference -> { final PsiElement element = reference.getElement(); if (element instanceof PsiLiteralExpression) { final PsiNewExpression expression = PsiTreeUtil.getParentOfType(element, PsiNewExpression.class); if (expression != null) { final PsiType type = expression.getType(); if (type != null && type.equalsToText(JavaFxCommonNames.JAVAFX_FXML_FXMLLOADER)) { final PsiElement parent = expression.getParent(); if (parent instanceof PsiLocalVariable) { ReferencesSearch.search(parent).forEach(processor); final PsiClass controller = processor.getInjectedController(); if (controller != null) { injectedController.set(controller); dep.set(processor.getContainingFile()); return false; } } } } } return true; }); } } return new Result<>(injectedController.get(), dep.get() != null ? 
dep.get() : PsiModificationTracker.MODIFICATION_COUNT); } private static abstract class JavaFxRetrieveControllerProcessor implements Processor<PsiReference> { private final Ref<PsiClass> myInjectedController = new Ref<>(); private final Ref<PsiFile> myContainingFile = new Ref<>(); protected abstract boolean isResolveToSetter(PsiMethodCallExpression methodCallExpression); @Override public boolean process(PsiReference reference) { final PsiElement element = reference.getElement(); if (element instanceof PsiReferenceExpression) { final PsiMethodCallExpression methodCallExpression = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class); if (methodCallExpression != null && isResolveToSetter(methodCallExpression)) { final PsiExpression[] expressions = methodCallExpression.getArgumentList().getExpressions(); if (expressions.length > 0) { final PsiClass psiClass = PsiUtil.resolveClassInType(expressions[0].getType()); if (psiClass != null) { myInjectedController.set(psiClass); myContainingFile.set(methodCallExpression.getContainingFile()); return false; } } } } return true; } private PsiClass getInjectedController() { return myInjectedController.get(); } private PsiFile getContainingFile() { return myContainingFile.get(); } } } }
Javafx: Reduced search scope when looking for FXML controller class. Moved doPreventingRecursion() inside the computation of the cached value. (IDEA-160386)
plugins/javaFX/src/org/jetbrains/plugins/javaFX/fxml/JavaFxPsiUtil.java
Javafx: Reduced search scope when looking for FXML controller class. Moved doPreventingRecursion() inside the computation of the cached value. (IDEA-160386)
<ide><path>lugins/javaFX/src/org/jetbrains/plugins/javaFX/fxml/JavaFxPsiUtil.java <ide> } <ide> } <ide> final CachedValuesManager manager = CachedValuesManager.getManager(containingFile.getProject()); <del> final PsiClass injectedControllerClass = ourGuard.doPreventingRecursion(containingFile, true, <del> () -> manager.getCachedValue(containingFile, INJECTED_CONTROLLER, <del> new JavaFxControllerCachedValueProvider(containingFile.getProject(), containingFile), true)); <add> final PsiClass injectedControllerClass = manager.getCachedValue( <add> containingFile, INJECTED_CONTROLLER, () -> computeInjectedControllerClass(containingFile), true); <ide> if (injectedControllerClass != null) { <ide> return injectedControllerClass; <ide> } <ide> return qualifiedName != null && !JavaFxControllerClassIndex.findFxmlWithController(project, qualifiedName, resolveScope).isEmpty(); <ide> } <ide> <del> private static class JavaFxControllerCachedValueProvider implements CachedValueProvider<PsiClass> { <del> private final Project myProject; <del> private final PsiFile myContainingFile; <del> <del> public JavaFxControllerCachedValueProvider(Project project, PsiFile containingFile) { <del> myProject = project; <del> myContainingFile = containingFile; <del> } <del> <del> @Nullable <del> @Override <del> public Result<PsiClass> compute() { <add> @Nullable <add> private static CachedValueProvider.Result<PsiClass> computeInjectedControllerClass(PsiFile containingFile) { <add> return ourGuard.doPreventingRecursion(containingFile, true, () -> { <add> final Project project = containingFile.getProject(); <ide> final Ref<PsiClass> injectedController = new Ref<>(); <ide> final Ref<PsiFile> dep = new Ref<>(); <ide> final PsiClass fxmlLoader = <del> JavaPsiFacade.getInstance(myProject).findClass(JavaFxCommonNames.JAVAFX_FXML_FXMLLOADER, GlobalSearchScope.allScope(myProject)); <add> JavaPsiFacade.getInstance(project).findClass(JavaFxCommonNames.JAVAFX_FXML_FXMLLOADER, 
GlobalSearchScope.allScope(project)); <ide> if (fxmlLoader != null) { <ide> final PsiMethod[] injectControllerMethods = fxmlLoader.findMethodsByName("setController", false); <ide> if (injectControllerMethods.length == 1) { <ide> } <ide> }; <ide> final GlobalSearchScope globalSearchScope = GlobalSearchScope <del> .notScope(GlobalSearchScope.getScopeRestrictedByFileTypes(myContainingFile.getResolveScope(), StdFileTypes.XML)); <del> ReferencesSearch.search(myContainingFile, globalSearchScope).forEach(reference -> { <add> .getScopeRestrictedByFileTypes(containingFile.getResolveScope(), StdFileTypes.JAVA); <add> ReferencesSearch.search(containingFile, globalSearchScope).forEach(reference -> { <ide> final PsiElement element = reference.getElement(); <ide> if (element instanceof PsiLiteralExpression) { <ide> final PsiNewExpression expression = PsiTreeUtil.getParentOfType(element, PsiNewExpression.class); <ide> }); <ide> } <ide> } <del> return new Result<>(injectedController.get(), dep.get() != null ? 
dep.get() : PsiModificationTracker.MODIFICATION_COUNT); <del> } <del> <del> private static abstract class JavaFxRetrieveControllerProcessor implements Processor<PsiReference> { <del> private final Ref<PsiClass> myInjectedController = new Ref<>(); <del> private final Ref<PsiFile> myContainingFile = new Ref<>(); <del> <del> protected abstract boolean isResolveToSetter(PsiMethodCallExpression methodCallExpression); <del> <del> @Override <del> public boolean process(PsiReference reference) { <del> final PsiElement element = reference.getElement(); <del> if (element instanceof PsiReferenceExpression) { <del> final PsiMethodCallExpression methodCallExpression = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class); <del> if (methodCallExpression != null && isResolveToSetter(methodCallExpression)) { <del> final PsiExpression[] expressions = methodCallExpression.getArgumentList().getExpressions(); <del> if (expressions.length > 0) { <del> final PsiClass psiClass = PsiUtil.resolveClassInType(expressions[0].getType()); <del> if (psiClass != null) { <del> myInjectedController.set(psiClass); <del> myContainingFile.set(methodCallExpression.getContainingFile()); <del> return false; <del> } <add> return new CachedValueProvider.Result<>(injectedController.get(), dep.get() != null ? 
dep.get() : PsiModificationTracker.MODIFICATION_COUNT); <add> }); <add> } <add> <add> private static abstract class JavaFxRetrieveControllerProcessor implements Processor<PsiReference> { <add> private final Ref<PsiClass> myInjectedController = new Ref<>(); <add> private final Ref<PsiFile> myContainingFile = new Ref<>(); <add> <add> protected abstract boolean isResolveToSetter(PsiMethodCallExpression methodCallExpression); <add> <add> @Override <add> public boolean process(PsiReference reference) { <add> final PsiElement element = reference.getElement(); <add> if (element instanceof PsiReferenceExpression) { <add> final PsiMethodCallExpression methodCallExpression = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class); <add> if (methodCallExpression != null && isResolveToSetter(methodCallExpression)) { <add> final PsiExpression[] expressions = methodCallExpression.getArgumentList().getExpressions(); <add> if (expressions.length > 0) { <add> final PsiClass psiClass = PsiUtil.resolveClassInType(expressions[0].getType()); <add> if (psiClass != null) { <add> myInjectedController.set(psiClass); <add> myContainingFile.set(methodCallExpression.getContainingFile()); <add> return false; <ide> } <ide> } <ide> } <del> return true; <del> } <del> <del> private PsiClass getInjectedController() { <del> return myInjectedController.get(); <del> } <del> <del> private PsiFile getContainingFile() { <del> return myContainingFile.get(); <del> } <add> } <add> return true; <add> } <add> <add> private PsiClass getInjectedController() { <add> return myInjectedController.get(); <add> } <add> <add> private PsiFile getContainingFile() { <add> return myContainingFile.get(); <ide> } <ide> } <ide> }
Java
apache-2.0
2b7bb3433df7cf18268bf2e79e6cb9b31af7dd37
0
sdgdsffdsfff/hbase-indexer,chenrongwei/hbase-indexer,fiserro/hbase-indexer,NGDATA/hbase-indexer,LucidWorks/hbase-indexer,sdgdsffdsfff/hbase-indexer,cloudera/hbase-indexer,fiserro/hbase-indexer,RandyChen1985/hbase-indexer,cloudera/hbase-indexer,whoschek/hbase-indexer,NGDATA/hbase-indexer,nero520/hbase-indexer,chenrongwei/hbase-indexer,LucidWorks/hbase-indexer,nero520/hbase-indexer,whoschek/hbase-indexer,RandyChen1985/hbase-indexer
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.hadoop; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Writer; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Random; import org.kitesdk.morphline.base.Fields; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import com.google.common.io.ByteStreams; import com.ngdata.hbaseindexer.mr.JobProcessCallback; import com.ngdata.hbaseindexer.mr.NopJobProcessCallback; import net.sourceforge.argparse4j.ArgumentParsers; import net.sourceforge.argparse4j.impl.Arguments; import net.sourceforge.argparse4j.impl.action.HelpArgumentAction; import net.sourceforge.argparse4j.impl.choice.RangeArgumentChoice; import net.sourceforge.argparse4j.impl.type.FileArgumentType; import 
net.sourceforge.argparse4j.inf.Argument; import net.sourceforge.argparse4j.inf.ArgumentGroup; import net.sourceforge.argparse4j.inf.ArgumentParser; import net.sourceforge.argparse4j.inf.ArgumentParserException; import net.sourceforge.argparse4j.inf.FeatureControl; import net.sourceforge.argparse4j.inf.Namespace; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.log4j.PropertyConfigurator; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.hadoop.MapReduceIndexerTool.Options; import org.apache.solr.hadoop.dedup.RetainMostRecentUpdateConflictResolver; import org.apache.solr.hadoop.morphline.MorphlineMapRunner; import org.apache.solr.hadoop.morphline.MorphlineMapper; import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * <b>NOTE:</b> This forked implementation is only here temporarily, in order to facilitate further * development of the HBaseMapReduceIndexerTool. Once the core MapReduceIndexerTool is updated to * allow better reusability from outside tools, this class should be removed. 
* * * Public API for a MapReduce batch job driver that creates a set of Solr index shards from a set of * input files and writes the indexes into HDFS, in a flexible, scalable and fault-tolerant manner. * Also supports merging the output shards into a set of live customer facing Solr servers, * typically a SolrCloud. */ public class ForkedMapReduceIndexerTool extends Configured implements Tool { Job job; // visible for testing only public static final String RESULTS_DIR = "results"; static final String MAIN_MEMORY_RANDOMIZATION_THRESHOLD = ForkedMapReduceIndexerTool.class.getName() + ".mainMemoryRandomizationThreshold"; private static final String FULL_INPUT_LIST = "full-input-list.txt"; private static final Logger LOG = LoggerFactory.getLogger(ForkedMapReduceIndexerTool.class); /** * See http://argparse4j.sourceforge.net and for details see http://argparse4j.sourceforge.net/usage.html */ static final class MyArgumentParser { /** * Parses the given command line arguments. * * @return exitCode null indicates the caller shall proceed with processing, * non-null indicates the caller shall exit the program with the * given exit status code. */ public Integer parseArgs(String[] args, Configuration conf, Options opts) { assert args != null; assert conf != null; assert opts != null; if (args.length == 0) { args = new String[]{"--help"}; } ArgumentParser parser = ArgumentParsers .newArgumentParser( "hadoop [GenericOptions]... jar search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName(), false) .defaultHelp(true) .description( "MapReduce batch job driver that takes a morphline and creates a set of Solr index shards from a set of input files " + "and writes the indexes into HDFS, in a flexible, scalable and fault-tolerant manner. " + "It also supports merging the output shards into a set of live customer facing Solr servers, " + "typically a SolrCloud. 
The program proceeds in several consecutive MapReduce based phases, as follows:" + "\n\n" + "1) Randomization phase: This (parallel) phase randomizes the list of input files in order to spread " + "indexing load more evenly among the mappers of the subsequent phase." + "\n\n" + "2) Mapper phase: This (parallel) phase takes the input files, extracts the relevant content, transforms it " + "and hands SolrInputDocuments to a set of reducers. " + "The ETL functionality is flexible and " + "customizable using chains of arbitrary morphline commands that pipe records from one transformation command to another. " + "Commands to parse and transform a set of standard data formats such as Avro, CSV, Text, HTML, XML, " + "PDF, Word, Excel, etc. are provided out of the box, and additional custom commands and parsers for additional " + "file or data formats can be added as morphline plugins. " + "This is done by implementing a simple Java interface that consumes a record (e.g. a file in the form of an InputStream " + "plus some headers plus contextual metadata) and generates as output zero or more records. " + "Any kind of data format can be indexed and any Solr documents for any kind of Solr schema can be generated, " + "and any custom ETL logic can be registered and executed.\n" + "Record fields, including MIME types, can also explicitly be passed by force from the CLI to the morphline, for example: " + "hadoop ... -D " + MorphlineMapRunner.MORPHLINE_FIELD_PREFIX + Fields.ATTACHMENT_MIME_TYPE + "=text/csv" + "\n\n" + "3) Reducer phase: This (parallel) phase loads the mapper's SolrInputDocuments into one EmbeddedSolrServer per reducer. " + "Each such reducer and Solr server can be seen as a (micro) shard. The Solr servers store their " + "data in HDFS." + "\n\n" + "4) Mapper-only merge phase: This (parallel) phase merges the set of reducer shards into the number of solr " + "shards expected by the user, using a mapper-only job. 
This phase is omitted if the number " + "of shards is already equal to the number of shards expected by the user. " + "\n\n" + "5) Go-live phase: This optional (parallel) phase merges the output shards of the previous phase into a set of " + "live customer facing Solr servers, typically a SolrCloud. " + "If this phase is omitted you can explicitly point each Solr server to one of the HDFS output shard directories." + "\n\n" + "Fault Tolerance: Mapper and reducer task attempts are retried on failure per the standard MapReduce semantics. " + "On program startup all data in the --output-dir is deleted if that output directory already exists. " + "If the whole job fails you can retry simply by rerunning the program again using the same arguments." ); parser.addArgument("--help", "-help", "-h") .help("Show this help message and exit") .action(new HelpArgumentAction() { @Override public void run(ArgumentParser parser, Argument arg, Map<String, Object> attrs, String flag, Object value) throws ArgumentParserException { parser.printHelp(new PrintWriter(System.out)); System.out.println(); System.out.print(ForkedToolRunnerHelpFormatter.getGenericCommandUsage()); //ToolRunner.printGenericCommandUsage(System.out); System.out.println( "Examples: \n\n" + "# (Re)index an Avro based Twitter tweet file:\n" + "sudo -u hdfs hadoop \\\n" + " --config /etc/hadoop/conf.cloudera.mapreduce1 \\\n" + " jar target/search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName() + " \\\n" + " -D 'mapred.child.java.opts=-Xmx500m' \\\n" + // " -D 'mapreduce.child.java.opts=-Xmx500m' \\\n" + " --log4j src/test/resources/log4j.properties \\\n" + " --morphline-file ../search-core/src/test/resources/test-morphlines/tutorialReadAvroContainer.conf \\\n" + " --solr-home-dir src/test/resources/solr/minimr \\\n" + " --output-dir hdfs://c2202.mycompany.com/user/$USER/test \\\n" + " --shards 1 \\\n" + " hdfs:///user/$USER/test-documents/sample-statuses-20120906-141433.avro\n" + "\n" + "# (Re)index all 
files that match all of the following conditions:\n" + "# 1) File is contained in dir tree hdfs:///user/$USER/solrloadtest/twitter/tweets\n" + "# 2) file name matches the glob pattern 'sample-statuses*.gz'\n" + "# 3) file was last modified less than 100000 minutes ago\n" + "# 4) file size is between 1 MB and 1 GB\n" + "# Also include extra library jar file containing JSON tweet Java parser:\n" + "hadoop jar target/search-mr-*-job.jar " + HdfsFindTool.class.getName() + " \\\n" + " -find hdfs:///user/$USER/solrloadtest/twitter/tweets \\\n" + " -type f \\\n" + " -name 'sample-statuses*.gz' \\\n" + " -mmin -1000000 \\\n" + " -size -100000000c \\\n" + " -size +1000000c \\\n" + "| sudo -u hdfs hadoop \\\n" + " --config /etc/hadoop/conf.cloudera.mapreduce1 \\\n" + " jar target/search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName() + " \\\n" + " -D 'mapred.child.java.opts=-Xmx500m' \\\n" + // " -D 'mapreduce.child.java.opts=-Xmx500m' \\\n" + " --log4j src/test/resources/log4j.properties \\\n" + " --morphline-file ../search-core/src/test/resources/test-morphlines/tutorialReadJsonTestTweets.conf \\\n" + " --solr-home-dir src/test/resources/solr/minimr \\\n" + " --output-dir hdfs://c2202.mycompany.com/user/$USER/test \\\n" + " --shards 100 \\\n" + " --input-list -\n" + "\n" + "# Go live by merging resulting index shards into a live Solr cluster\n" + "# (explicitly specify Solr URLs - for a SolrCloud cluster see next example):\n" + "sudo -u hdfs hadoop \\\n" + " --config /etc/hadoop/conf.cloudera.mapreduce1 \\\n" + " jar target/search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName() + " \\\n" + " -D 'mapred.child.java.opts=-Xmx500m' \\\n" + // " -D 'mapreduce.child.java.opts=-Xmx500m' \\\n" + " --log4j src/test/resources/log4j.properties \\\n" + " --morphline-file ../search-core/src/test/resources/test-morphlines/tutorialReadAvroContainer.conf \\\n" + " --solr-home-dir src/test/resources/solr/minimr \\\n" + " --output-dir 
hdfs://c2202.mycompany.com/user/$USER/test \\\n" + " --shard-url http://solr001.mycompany.com:8983/solr/collection1 \\\n" + " --shard-url http://solr002.mycompany.com:8983/solr/collection1 \\\n" + " --go-live \\\n" + " hdfs:///user/foo/indir\n" + "\n" + "# Go live by merging resulting index shards into a live SolrCloud cluster\n" + "# (discover shards and Solr URLs through ZooKeeper):\n" + "sudo -u hdfs hadoop \\\n" + " --config /etc/hadoop/conf.cloudera.mapreduce1 \\\n" + " jar target/search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName() + " \\\n" + " -D 'mapred.child.java.opts=-Xmx500m' \\\n" + // " -D 'mapreduce.child.java.opts=-Xmx500m' \\\n" + " --log4j src/test/resources/log4j.properties \\\n" + " --morphline-file ../search-core/src/test/resources/test-morphlines/tutorialReadAvroContainer.conf \\\n" + " --output-dir hdfs://c2202.mycompany.com/user/$USER/test \\\n" + " --zk-host zk01.mycompany.com:2181/solr \\\n" + " --collection collection1 \\\n" + " --go-live \\\n" + " hdfs:///user/foo/indir\n" ); throw new FoundHelpArgument(); // Trick to prevent processing of any remaining arguments } }); ArgumentGroup requiredGroup = parser.addArgumentGroup("Required arguments"); Argument outputDirArg = requiredGroup.addArgument("--output-dir") .metavar("HDFS_URI") .type(new PathArgumentType(conf) { @Override public Path convert(ArgumentParser parser, Argument arg, String value) throws ArgumentParserException { Path path = super.convert(parser, arg, value); if ("hdfs".equals(path.toUri().getScheme()) && path.toUri().getAuthority() == null) { // TODO: consider defaulting to hadoop's fs.default.name here or in SolrRecordWriter.createEmbeddedSolrServer() throw new ArgumentParserException("Missing authority in path URI: " + path, parser); } return path; } }.verifyHasScheme().verifyIsAbsolute().verifyCanWriteParent()) .required(true) .help("HDFS directory to write Solr indexes to. Inside there one output directory per shard will be generated. 
" + "Example: hdfs://c2202.mycompany.com/user/$USER/test"); Argument inputListArg = parser.addArgument("--input-list") .action(Arguments.append()) .metavar("URI") // .type(new PathArgumentType(fs).verifyExists().verifyCanRead()) .type(Path.class) .help("Local URI or HDFS URI of a UTF-8 encoded file containing a list of HDFS URIs to index, " + "one URI per line in the file. If '-' is specified, URIs are read from the standard input. " + "Multiple --input-list arguments can be specified."); Argument morphlineFileArg = requiredGroup.addArgument("--morphline-file") .metavar("FILE") .type(new FileArgumentType().verifyExists().verifyIsFile().verifyCanRead()) .required(true) .help("Relative or absolute path to a local config file that contains one or more morphlines. " + "The file must be UTF-8 encoded. Example: /path/to/morphline.conf"); Argument morphlineIdArg = parser.addArgument("--morphline-id") .metavar("STRING") .type(String.class) .help("The identifier of the morphline that shall be executed within the morphline config file " + "specified by --morphline-file. If the --morphline-id option is ommitted the first (i.e. " + "top-most) morphline within the config file is used. Example: morphline1"); Argument solrHomeDirArg = parser.addArgument("--solr-home-dir") .metavar("DIR") .type(new FileArgumentType() { @Override public File convert(ArgumentParser parser, Argument arg, String value) throws ArgumentParserException { File solrHomeDir = super.convert(parser, arg, value); File solrConfigFile = new File(new File(solrHomeDir, "conf"), "solrconfig.xml"); new FileArgumentType().verifyExists().verifyIsFile().verifyCanRead().convert( parser, arg, solrConfigFile.getPath()); return solrHomeDir; } }.verifyIsDirectory().verifyCanRead()) .required(false) .help("Relative or absolute path to a local dir containing Solr conf/ dir and in particular " + "conf/solrconfig.xml and optionally also lib/ dir. This directory will be uploaded to each MR task. 
" + "Example: src/test/resources/solr/minimr"); Argument updateConflictResolverArg = parser.addArgument("--update-conflict-resolver") .metavar("FQCN") .type(String.class) .setDefault(RetainMostRecentUpdateConflictResolver.class.getName()) .help("Fully qualified class name of a Java class that implements the UpdateConflictResolver interface. " + "This enables deduplication and ordering of a series of document updates for the same unique document " + "key. For example, a MapReduce batch job might index multiple files in the same job where some of the " + "files contain old and new versions of the very same document, using the same unique document key.\n" + "Typically, implementations of this interface forbid collisions by throwing an exception, or ignore all but " + "the most recent document version, or, in the general case, order colliding updates ascending from least " + "recent to most recent (partial) update. The caller of this interface (i.e. the Hadoop Reducer) will then " + "apply the updates to Solr in the order returned by the orderUpdates() method.\n" + "The default RetainMostRecentUpdateConflictResolver implementation ignores all but the most recent document " + "version, based on a configurable numeric Solr field, which defaults to the file_last_modified timestamp"); Argument mappersArg = parser.addArgument("--mappers") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(-1, Integer.MAX_VALUE)) // TODO: also support X% syntax where X is an integer .setDefault(-1) .help("Tuning knob that indicates the maximum number of MR mapper tasks to use. -1 indicates use all map slots " + "available on the cluster."); Argument reducersArg = parser.addArgument("--reducers") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(-1, Integer.MAX_VALUE)) // TODO: also support X% syntax where X is an integer .setDefault(-1) .help("Tuning knob that indicates the number of reducers to index into. 
" + "-1 indicates use all reduce slots available on the cluster. " + "0 indicates use one reducer per output shard, which disables the mtree merge MR algorithm. " + "The mtree merge MR algorithm improves scalability by spreading load " + "(in particular CPU load) among a number of parallel reducers that can be much larger than the number " + "of solr shards expected by the user. It can be seen as an extension of concurrent lucene merges " + "and tiered lucene merges to the clustered case. The subsequent mapper-only phase " + "merges the output of said large number of reducers to the number of shards expected by the user, " + "again by utilizing more available parallelism on the cluster."); Argument fanoutArg = parser.addArgument("--fanout") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(2, Integer.MAX_VALUE)) .setDefault(Integer.MAX_VALUE) .help(FeatureControl.SUPPRESS); Argument maxSegmentsArg = parser.addArgument("--max-segments") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(1, Integer.MAX_VALUE)) .setDefault(1) .help("Tuning knob that indicates the maximum number of segments to be contained on output in the index of " + "each reducer shard. After a reducer has built its output index it applies a merge policy to merge segments " + "until there are <= maxSegments lucene segments left in this index. " + "Merging segments involves reading and rewriting all data in all these segment files, " + "potentially multiple times, which is very I/O intensive and time consuming. " + "However, an index with fewer segments can later be merged faster, " + "and it can later be queried faster once deployed to a live Solr serving shard. " + "Set maxSegments to 1 to optimize the index for low query latency. " + "In a nutshell, a small maxSegments value trades indexing latency for subsequently improved query latency. 
" + "This can be a reasonable trade-off for batch indexing systems."); Argument fairSchedulerPoolArg = parser.addArgument("--fair-scheduler-pool") .metavar("STRING") .help("Optional tuning knob that indicates the name of the fair scheduler pool to submit jobs to. " + "The Fair Scheduler is a pluggable MapReduce scheduler that provides a way to share large clusters. " + "Fair scheduling is a method of assigning resources to jobs such that all jobs get, on average, an " + "equal share of resources over time. When there is a single job running, that job uses the entire " + "cluster. When other jobs are submitted, tasks slots that free up are assigned to the new jobs, so " + "that each job gets roughly the same amount of CPU time. Unlike the default Hadoop scheduler, which " + "forms a queue of jobs, this lets short jobs finish in reasonable time while not starving long jobs. " + "It is also an easy way to share a cluster between multiple of users. Fair sharing can also work with " + "job priorities - the priorities are used as weights to determine the fraction of total compute time " + "that each job gets."); Argument dryRunArg = parser.addArgument("--dry-run") .action(Arguments.storeTrue()) .help("Run in local mode and print documents to stdout instead of loading them into Solr. This executes " + "the morphline in the client process (without submitting a job to MR) for quicker turnaround during " + "early trial & debug sessions."); Argument log4jConfigFileArg = parser.addArgument("--log4j") .metavar("FILE") .type(new FileArgumentType().verifyExists().verifyIsFile().verifyCanRead()) .help("Relative or absolute path to a log4j.properties config file on the local file system. This file " + "will be uploaded to each MR task. 
Example: /path/to/log4j.properties"); Argument verboseArg = parser.addArgument("--verbose", "-v") .action(Arguments.storeTrue()) .help("Turn on verbose output."); ArgumentGroup clusterInfoGroup = parser .addArgumentGroup("Cluster arguments") .description( "Arguments that provide information about your Solr cluster. " + "If you are not using --go-live, pass the --shards argument. If you are building shards for " + "a Non-SolrCloud cluster, pass the --shard-url argument one or more times. To build indexes for" + " a replicated cluster with --shard-url, pass replica urls consecutively and also pass --shards. " + "If you are building shards for a SolrCloud cluster, pass the --zk-host argument. " + "Using --go-live requires either --shard-url or --zk-host."); Argument shardUrlsArg = clusterInfoGroup.addArgument("--shard-url") .metavar("URL") .type(String.class) .action(Arguments.append()) .help("Solr URL to merge resulting shard into if using --go-live. " + "Example: http://solr001.mycompany.com:8983/solr/collection1. " + "Multiple --shard-url arguments can be specified, one for each desired shard. " + "If you are merging shards into a SolrCloud cluster, use --zk-host instead."); Argument zkHostArg = clusterInfoGroup.addArgument("--zk-host") .metavar("STRING") .type(String.class) .help("The address of a ZooKeeper ensemble being used by a SolrCloud cluster. " + "This ZooKeeper ensemble will be examined to determine the number of output " + "shards to create as well as the Solr URLs to merge the output shards into when using the --go-live option. " + "Requires that you also pass the --collection to merge the shards into.\n" + "\n" + "The --zk-host option implements the same partitioning semantics as the standard SolrCloud " + "Near-Real-Time (NRT) API. 
This enables to mix batch updates from MapReduce ingestion with " + "updates from standard Solr NRT ingestion on the same SolrCloud cluster, " + "using identical unique document keys.\n" + "\n" + "Format is: a list of comma separated host:port pairs, each corresponding to a zk " + "server. Example: '127.0.0.1:2181,127.0.0.1:2182,127.0.0.1:2183' If " + "the optional chroot suffix is used the example would look " + "like: '127.0.0.1:2181/solr,127.0.0.1:2182/solr,127.0.0.1:2183/solr' " + "where the client would be rooted at '/solr' and all paths " + "would be relative to this root - i.e. getting/setting/etc... " + "'/foo/bar' would result in operations being run on " + "'/solr/foo/bar' (from the server perspective).\n" + "\n" + "If --solr-home-dir is not specified, the Solr home directory for the collection " + "will be downloaded from this ZooKeeper ensemble."); Argument shardsArg = clusterInfoGroup.addArgument("--shards") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(1, Integer.MAX_VALUE)) .help("Number of output shards to generate."); ArgumentGroup goLiveGroup = parser.addArgumentGroup("Go live arguments") .description("Arguments for merging the shards that are built into a live Solr cluster. " + "Also see the Cluster arguments."); Argument goLiveArg = goLiveGroup.addArgument("--go-live") .action(Arguments.storeTrue()) .help("Allows you to optionally merge the final index shards into a live Solr cluster after they are built. " + "You can pass the ZooKeeper address with --zk-host and the relevant cluster information will be auto detected. " + "If you are not using a SolrCloud cluster, --shard-url arguments can be used to specify each SolrCore to merge " + "each shard into."); Argument collectionArg = goLiveGroup.addArgument("--collection") .metavar("STRING") .help("The SolrCloud collection to merge shards into when using --go-live and --zk-host. 
Example: collection1"); Argument goLiveThreadsArg = goLiveGroup.addArgument("--go-live-threads") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(1, Integer.MAX_VALUE)) .setDefault(1000) .help("Tuning knob that indicates the maximum number of live merges to run in parallel at one time."); // trailing positional arguments Argument inputFilesArg = parser.addArgument("input-files") .metavar("HDFS_URI") .type(new PathArgumentType(conf).verifyHasScheme().verifyExists().verifyCanRead()) .nargs("*") .setDefault() .help("HDFS URI of file or directory tree to index."); Namespace ns; try { ns = parser.parseArgs(args); } catch (FoundHelpArgument e) { return 0; } catch (ArgumentParserException e) { parser.handleError(e); return 1; } opts.log4jConfigFile = (File) ns.get(log4jConfigFileArg.getDest()); if (opts.log4jConfigFile != null) { PropertyConfigurator.configure(opts.log4jConfigFile.getPath()); } LOG.debug("Parsed command line args: {}", ns); opts.inputLists = ns.getList(inputListArg.getDest()); if (opts.inputLists == null) { opts.inputLists = Collections.EMPTY_LIST; } opts.inputFiles = ns.getList(inputFilesArg.getDest()); opts.outputDir = (Path) ns.get(outputDirArg.getDest()); opts.mappers = ns.getInt(mappersArg.getDest()); opts.reducers = ns.getInt(reducersArg.getDest()); opts.updateConflictResolver = ns.getString(updateConflictResolverArg.getDest()); opts.fanout = ns.getInt(fanoutArg.getDest()); opts.maxSegments = ns.getInt(maxSegmentsArg.getDest()); opts.morphlineFile = (File) ns.get(morphlineFileArg.getDest()); opts.morphlineId = ns.getString(morphlineIdArg.getDest()); opts.solrHomeDir = (File) ns.get(solrHomeDirArg.getDest()); opts.fairSchedulerPool = ns.getString(fairSchedulerPoolArg.getDest()); opts.isDryRun = ns.getBoolean(dryRunArg.getDest()); opts.isVerbose = ns.getBoolean(verboseArg.getDest()); opts.zkHost = ns.getString(zkHostArg.getDest()); opts.shards = ns.getInt(shardsArg.getDest()); opts.shardUrls = 
buildShardUrls(ns.getList(shardUrlsArg.getDest()), opts.shards);
opts.goLive = ns.getBoolean(goLiveArg.getDest());
opts.goLiveThreads = ns.getInt(goLiveThreadsArg.getDest());
opts.collection = ns.getString(collectionArg.getDest());

// Fail fast on inconsistent go-live / cluster arguments before any MR job is launched.
try {
  verifyGoLiveArgs(opts, parser);
} catch (ArgumentParserException e) {
  parser.handleError(e);
  return 1;
}

if (opts.inputLists.isEmpty() && opts.inputFiles.isEmpty()) {
  LOG.info("No input files specified - nothing to process");
  return 0; // nothing to process
}
// null means: arguments are fine, caller should proceed with the job.
return null;
}

/**
 * Marker trick to prevent processing of any remaining arguments once --help option has been parsed
 */
private static final class FoundHelpArgument extends RuntimeException {
}
} // END OF INNER CLASS

/**
 * Groups the flat list of shard URLs into {@code numShards} sublists of consecutive URLs.
 * Per the --shard-url help text, replica URLs for the same logical shard are expected to be
 * passed consecutively, so each sublist holds the replicas of one shard.
 * Returns null for null input; a null numShards means one URL per group.
 */
public static List<List<String>> buildShardUrls(List<Object> urls, Integer numShards) {
  if (urls == null) return null;
  List<List<String>> shardUrls = new ArrayList<List<String>>(urls.size());
  List<String> list = null;
  int sz;
  if (numShards == null) {
    numShards = urls.size();
  }
  // group size: ceiling so that no URL is dropped when urls.size() is not a multiple of numShards
  sz = (int) Math.ceil(urls.size() / (float) numShards);

  for (int i = 0; i < urls.size(); i++) {
    if (i % sz == 0) {
      // start a new shard group
      list = new ArrayList<String>();
      shardUrls.add(list);
    }
    list.add((String) urls.get(i));
  }

  return shardUrls;
}

// TODO Get rid of this, it's just here to get around the fact that
// the Options class is not public
public static class OptionsBridge {

  public boolean goLive;
  public String collection;
  public String zkHost;
  public Integer goLiveThreads;
  public List<List<String>> shardUrls;
  public List<Path> inputLists;
  public List<Path> inputFiles;
  public Path outputDir;
  public int mappers;
  public int reducers;
  public String updateConflictResolver;
  public int fanout;
  public Integer shards;
  public int maxSegments;
  public File morphlineFile;
  public String morphlineId;
  public File solrHomeDir;
  public String fairSchedulerPool;
  public boolean isDryRun;
  public File log4jConfigFile;
  public boolean isVerbose;

  /** Copies every field into a (package-private) Options instance and validates the ZK setup. */
  public Options asOptions() {
    Options opts = new Options();
    opts.collection =
this.collection;
    opts.fairSchedulerPool = this.fairSchedulerPool;
    opts.fanout = this.fanout;
    opts.goLive = this.goLive;
    opts.goLiveThreads = this.goLiveThreads;
    opts.isDryRun = this.isDryRun;
    opts.isVerbose = this.isVerbose;
    opts.log4jConfigFile = this.log4jConfigFile;
    opts.mappers = this.mappers;
    opts.maxSegments = this.maxSegments;
    opts.morphlineFile = this.morphlineFile;
    opts.morphlineId = this.morphlineId;
    opts.outputDir = this.outputDir;
    opts.reducers = this.reducers;
    opts.shards = this.shards;
    opts.shardUrls = this.shardUrls;
    opts.solrHomeDir = this.solrHomeDir;
    opts.zkHost = this.zkHost;
    opts.updateConflictResolver = this.updateConflictResolver;
    try {
      // This has to go here because the verifyZKStructure method
      // expects an Options instance
      verifyZKStructure(opts, null);
    } catch (ArgumentParserException e) {
      // wrapped as unchecked so asOptions() keeps a throws-free signature
      throw new RuntimeException(e);
    }
    return opts;
  }
} // END OF INNER CLASS

/**
 * API for command line clients
 */
public static void main(String[] args) throws Exception {
  int res = ToolRunner.run(new Configuration(), new ForkedMapReduceIndexerTool(), args);
  System.exit(res);
}

public ForkedMapReduceIndexerTool() {
}

@Override
public int run(String[] args) throws Exception {
  Options opts = new Options();
  // parseArgs returns a non-null exit code when parsing already decided the outcome
  // (help printed, bad arguments, nothing to process); null means "proceed with the job".
  Integer exitCode = new MyArgumentParser().parseArgs(args, getConf(), opts);
  if (exitCode != null) {
    return exitCode;
  }
  return run(opts);
}

/**
 * API for Java clients; visible for testing; may become a public API eventually
 */
int run(Options options) throws Exception {
  if ("local".equals(getConf().get("mapred.job.tracker"))) {
    throw new IllegalStateException(
      "Running with LocalJobRunner (i.e. all of Hadoop inside a single JVM) is not supported "
      + "because LocalJobRunner does not (yet) implement the Hadoop Distributed Cache feature, "
      + "which is required for passing files via --files and --libjars");
  }

  long programStartTime = System.currentTimeMillis();
  if (options.fairSchedulerPool != null) {
    getConf().set("mapred.fairscheduler.pool", options.fairSchedulerPool);
  }
  getConf().setInt(SolrOutputFormat.SOLR_RECORD_WRITER_MAX_SEGMENTS, options.maxSegments);

  // switch off a false warning about allegedly not implementing Tool
  // also see http://hadoop.6.n7.nabble.com/GenericOptionsParser-warning-td8103.html
  // also see https://issues.apache.org/jira/browse/HADOOP-8183
  getConf().setBoolean("mapred.used.genericoptionsparser", true);

  if (options.log4jConfigFile != null) {
    Utils.setLogConfigFile(options.log4jConfigFile, getConf());
    addDistributedCacheFile(options.log4jConfigFile, getConf());
  }

  // NOTE: job is an instance field (declared elsewhere in this class), not a local.
  job = Job.getInstance(getConf());
  job.setJarByClass(getClass());

  if (options.morphlineFile == null) {
    throw new ArgumentParserException("Argument --morphline-file is required", null);
  }
  verifyGoLiveArgs(options, null);
  verifyZKStructure(options, null);

  int mappers = new JobClient(job.getConfiguration()).getClusterStatus().getMaxMapTasks(); // MR1
  //mappers = job.getCluster().getClusterStatus().getMapSlotCapacity(); // Yarn only
  LOG.info("Cluster reports {} mapper slots", mappers);

  if (options.mappers == -1) {
    mappers = 8 * mappers; // better accomodate stragglers
  } else {
    mappers = options.mappers;
  }
  if (mappers <= 0) {
    throw new IllegalStateException("Illegal number of mappers: " + mappers);
  }
  options.mappers = mappers;

  FileSystem fs = options.outputDir.getFileSystem(job.getConfiguration());
  // wipe any previous run's output dir before starting
  if (fs.exists(options.outputDir) && !delete(options.outputDir, true, fs)) {
    return -1;
  }
  Path outputStep2Dir = new Path(options.outputDir, "tmp2");
  Path outputStep1Dir = new Path(options.outputDir, "tmp1");
  Path fullInputList = new Path(outputStep1Dir, FULL_INPUT_LIST);
LOG.debug("Creating list of input files for mappers: {}", fullInputList);
  long numFiles = addInputFiles(options.inputFiles, options.inputLists, fullInputList, job.getConfiguration());
  if (numFiles == 0) {
    LOG.info("No input files found - nothing to process");
    return 0;
  }

  int numLinesPerSplit = (int) ceilDivide(numFiles, mappers);
  if (numLinesPerSplit < 0) { // numeric overflow from downcasting long to int?
    numLinesPerSplit = Integer.MAX_VALUE;
  }
  numLinesPerSplit = Math.max(1, numLinesPerSplit);

  int realMappers = Math.min(mappers, (int) ceilDivide(numFiles, numLinesPerSplit));
  calculateNumReducers(options, realMappers);
  int reducers = options.reducers;
  LOG.info("Using these parameters: " +
      "numFiles: {}, mappers: {}, realMappers: {}, reducers: {}, shards: {}, fanout: {}, maxSegments: {}",
      new Object[]{numFiles, mappers, realMappers, reducers, options.shards, options.fanout, options.maxSegments});

  LOG.info("Randomizing list of {} input files to spread indexing load more evenly among mappers", numFiles);
  long startTime = System.currentTimeMillis();
  if (numFiles < job.getConfiguration().getInt(MAIN_MEMORY_RANDOMIZATION_THRESHOLD, 100001)) {
    // If there are few input files reduce latency by directly running main memory randomization
    // instead of launching a high latency MapReduce job
    randomizeFewInputFiles(fs, outputStep2Dir, fullInputList);
  } else {
    // Randomize using a MapReduce job. Use sequential algorithm below a certain threshold because there's no
    // benefit in using many parallel mapper tasks just to randomize the order of a few lines each
    int numLinesPerRandomizerSplit = Math.max(10 * 1000 * 1000, numLinesPerSplit);
    Job randomizerJob = randomizeManyInputFiles(getConf(), fullInputList, outputStep2Dir, numLinesPerRandomizerSplit);
    if (!waitForCompletion(randomizerJob, options.isVerbose)) {
      return -1; // job failed
    }
  }
  float secs = (System.currentTimeMillis() - startTime) / 1000.0f;
  LOG.info("Done. Randomizing list of {} input files took {} secs", numFiles, secs);

  job.setInputFormatClass(NLineInputFormat.class);
  NLineInputFormat.addInputPath(job, outputStep2Dir);
  NLineInputFormat.setNumLinesPerSplit(job, numLinesPerSplit);

  String mapperClass = job.getConfiguration().get(JobContext.MAP_CLASS_ATTR);
  if (mapperClass == null) { // enable customization
    Class clazz = MorphlineMapper.class;
    mapperClass = clazz.getName();
    job.setMapperClass(clazz);
  }
  job.setJobName(getClass().getName() + "/" + Utils.getShortClassName(mapperClass));

  return runIndexingPipeline(job, new NopJobProcessCallback(), getConf(), options, programStartTime, fs,
      fullInputList, numFiles, realMappers, reducers);
}

/**
 * Runs the indexing phase (mappers feeding SolrReducer shards), then the optional
 * mtree merge iterations and go-live merge. Returns 0 on success, -1 on job failure.
 */
public static int runIndexingPipeline(Job job, JobProcessCallback callback, Configuration conf, Options options,
    long programStartTime, FileSystem fs, Path fullInputList, long numFiles, int realMappers, int reducers)
    throws IOException, KeeperException, InterruptedException, ClassNotFoundException, FileNotFoundException {
  long startTime;
  float secs;
  Path outputResultsDir = new Path(options.outputDir, RESULTS_DIR);
  Path outputReduceDir = new Path(options.outputDir, "reducers");
  Path outputTreeMergeStep = new Path(options.outputDir, "mtree-merge-output");
  FileOutputFormat.setOutputPath(job, outputReduceDir);

  if (job.getConfiguration().get(JobContext.REDUCE_CLASS_ATTR) == null) { // enable customization
    job.setReducerClass(SolrReducer.class);
  }
  if (options.updateConflictResolver == null) {
    throw new IllegalArgumentException("updateConflictResolver must not be null");
  }
  job.getConfiguration().set(SolrReducer.UPDATE_CONFLICT_RESOLVER, options.updateConflictResolver);
  job.getConfiguration().setInt(SolrOutputFormat.SOLR_RECORD_WRITER_MAX_SEGMENTS, options.maxSegments);

  if (options.zkHost != null) {
    assert options.collection != null;
    /*
     * MapReduce partitioner that partitions the Mapper output such that each
     * SolrInputDocument gets sent to the SolrCloud shard that it would have
     * been sent to if
the document were ingested via the standard SolrCloud
     * Near Real Time (NRT) API.
     *
     * In other words, this class implements the same partitioning semantics
     * as the standard SolrCloud NRT API. This enables to mix batch updates
     * from MapReduce ingestion with updates from standard NRT ingestion on
     * the same SolrCloud cluster, using identical unique document keys.
     */
    if (job.getConfiguration().get(JobContext.PARTITIONER_CLASS_ATTR) == null) { // enable customization
      job.setPartitionerClass(ForkedSolrCloudPartitioner.class);
    }
    job.getConfiguration().set(ForkedSolrCloudPartitioner.ZKHOST, options.zkHost);
    job.getConfiguration().set(ForkedSolrCloudPartitioner.COLLECTION, options.collection);
  }
  job.getConfiguration().setInt(ForkedSolrCloudPartitioner.SHARDS, options.shards);

  job.setOutputFormatClass(SolrOutputFormat.class);
  if (options.solrHomeDir != null) {
    SolrOutputFormat.setupSolrHomeCache(options.solrHomeDir, job);
  } else {
    assert options.zkHost != null;
    // use the config that this collection uses for the SolrHomeCache.
    ForkedZooKeeperInspector zki = new ForkedZooKeeperInspector();
    SolrZkClient zkClient = zki.getZkClient(options.zkHost);
    try {
      String configName = zki.readConfigName(zkClient, options.collection);
      File tmpSolrHomeDir = zki.downloadConfigDir(zkClient, configName);
      SolrOutputFormat.setupSolrHomeCache(tmpSolrHomeDir, job);
      LOG.debug("Using " + tmpSolrHomeDir + " as solr home");
      options.solrHomeDir = tmpSolrHomeDir;
    } finally {
      zkClient.close();
    }
  }

  // Dry-run support appears disabled in this fork; original code kept for reference:
  // MorphlineMapRunner runner = setupMorphline(job, options);
  // if (options.isDryRun && runner != null) {
  //   LOG.info("Indexing {} files in dryrun mode", numFiles);
  //   startTime = System.currentTimeMillis();
  //   dryRun(job, runner, fs, fullInputList);
  //   secs = (System.currentTimeMillis() - startTime) / 1000.0f;
  //   LOG.info("Done. Indexing {} files in dryrun mode took {} secs", numFiles, secs);
  //   goodbye(null, programStartTime);
  //   return 0;
  // }
  // job.getConfiguration().set(MorphlineMapRunner.MORPHLINE_FILE_PARAM, options.morphlineFile.getName());

  job.setNumReduceTasks(reducers);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(SolrInputDocumentWritable.class);
  LOG.info("Indexing data into {} reducers", new Object[]{reducers});
  startTime = System.currentTimeMillis();
  job.submit();
  callback.jobStarted(job.getJobID().toString(), job.getTrackingURL());
  if (!waitForCompletion(job, options.isVerbose)) {
    return -1; // job failed
  }

  secs = (System.currentTimeMillis() - startTime) / 1000.0f;
  LOG.info("Done. Indexing data into {} reducers took {} secs", new Object[]{reducers, secs});

  // expected iteration count: log_fanout(reducers / shards); used only for progress logging
  int mtreeMergeIterations = 0;
  if (reducers > options.shards) {
    mtreeMergeIterations = (int) Math.round(log(options.fanout, reducers / options.shards));
  }
  LOG.debug("MTree merge iterations to do: {}", mtreeMergeIterations);
  int mtreeMergeIteration = 1;
  while (reducers > options.shards) { // run a mtree merge iteration
    job = Job.getInstance(conf);
    job.setJarByClass(ForkedMapReduceIndexerTool.class);
    job.setJobName(
        ForkedMapReduceIndexerTool.class.getName() + "/" + Utils.getShortClassName(ForkedTreeMergeMapper.class));
    job.setMapperClass(ForkedTreeMergeMapper.class);
    job.setOutputFormatClass(ForkedTreeMergeOutputFormat.class);
    job.setNumReduceTasks(0);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);
    job.setInputFormatClass(NLineInputFormat.class);

    Path inputStepDir = new Path(options.outputDir, "mtree-merge-input-iteration" + mtreeMergeIteration);
    fullInputList = new Path(inputStepDir, FULL_INPUT_LIST);
    LOG.debug("MTree merge iteration {}/{}: Creating input list file for mappers {}",
        new Object[]{mtreeMergeIteration, mtreeMergeIterations, fullInputList});
    numFiles = createTreeMergeInputDirList(job, outputReduceDir, fs, fullInputList);
    if (numFiles != reducers) {
      throw new
IllegalStateException("Not same reducers: " + reducers + ", numFiles: " + numFiles);
    }
    NLineInputFormat.addInputPath(job, fullInputList);
    // each mapper merges `fanout` leaf shards into one output shard
    NLineInputFormat.setNumLinesPerSplit(job, options.fanout);
    FileOutputFormat.setOutputPath(job, outputTreeMergeStep);

    LOG.info("MTree merge iteration {}/{}: Merging {} shards into {} shards using fanout {}", new Object[]{
        mtreeMergeIteration, mtreeMergeIterations, reducers, (reducers / options.fanout), options.fanout});
    startTime = System.currentTimeMillis();
    job.submit();
    callback.jobStarted(job.getJobID().toString(), job.getTrackingURL());
    if (!waitForCompletion(job, options.isVerbose)) {
      return -1; // job failed
    }
    // rewrite task output dirs so dir names carry the true solr shard numbers
    if (!renameTreeMergeShardDirs(outputTreeMergeStep, job, fs)) {
      return -1;
    }
    secs = (System.currentTimeMillis() - startTime) / 1000.0f;
    LOG.info("MTree merge iteration {}/{}: Done. Merging {} shards into {} shards using fanout {} took {} secs",
        new Object[]{mtreeMergeIteration, mtreeMergeIterations, reducers, (reducers / options.fanout),
            options.fanout, secs});

    // this iteration's output becomes the next iteration's input
    if (!delete(outputReduceDir, true, fs)) {
      return -1;
    }
    if (!rename(outputTreeMergeStep, outputReduceDir, fs)) {
      return -1;
    }
    assert reducers % options.fanout == 0;
    reducers = reducers / options.fanout;
    mtreeMergeIteration++;
  }
  assert reducers == options.shards;

  // normalize output shard dir prefix, i.e.
// rename part-r-00000 to part-00000 (stems from zero tree merge iterations)
  // rename part-m-00000 to part-00000 (stems from > 0 tree merge iterations)
  for (FileStatus stats : fs.listStatus(outputReduceDir)) {
    String dirPrefix = SolrOutputFormat.getOutputName(job);
    Path srcPath = stats.getPath();
    if (stats.isDirectory() && srcPath.getName().startsWith(dirPrefix)) {
      // strip the two-char "-m"/"-r" infix, keeping the prefix and the numeric suffix
      String dstName = dirPrefix + srcPath.getName().substring(dirPrefix.length() + "-m".length());
      Path dstPath = new Path(srcPath.getParent(), dstName);
      if (!rename(srcPath, dstPath, fs)) {
        return -1;
      }
    }
  }
  ;

  // publish results dir
  if (!rename(outputReduceDir, outputResultsDir, fs)) {
    return -1;
  }

  if (options.goLive && !new GoLive().goLive(options, listSortedOutputShardDirs(job, outputResultsDir, fs))) {
    return -1;
  }

  goodbye(job, programStartTime);
  return 0;
}

/**
 * Derives options.reducers from cluster capacity, the -1/0/explicit semantics of the
 * --reducers flag, and the mtree-merge invariant reducers == shards * fanout^N.
 * May also clamp options.fanout as a side effect.
 */
private void calculateNumReducers(Options options, int realMappers) throws IOException {
  if (options.shards <= 0) {
    throw new IllegalStateException("Illegal number of shards: " + options.shards);
  }
  if (options.fanout <= 1) {
    throw new IllegalStateException("Illegal fanout: " + options.fanout);
  }
  if (realMappers <= 0) {
    throw new IllegalStateException("Illegal realMappers: " + realMappers);
  }

  int reducers = new JobClient(job.getConfiguration()).getClusterStatus().getMaxReduceTasks(); // MR1
  //reducers = job.getCluster().getClusterStatus().getReduceSlotCapacity(); // Yarn only
  LOG.info("Cluster reports {} reduce slots", reducers);

  if (options.reducers == 0) {
    // 0 => one reducer per output shard (disables mtree merge, per the --reducers help text)
    reducers = options.shards;
  } else if (options.reducers == -1) {
    reducers = Math.min(reducers, realMappers); // no need to use many reducers when using few mappers
  } else {
    reducers = options.reducers;
  }
  reducers = Math.max(reducers, options.shards);

  if (reducers != options.shards) {
    // Ensure fanout isn't misconfigured. fanout can't meaningfully be larger than what would be
    // required to merge all leaf shards in one single tree merge iteration into root shards
    options.fanout = Math.min(options.fanout, (int) ceilDivide(reducers, options.shards));

    // Ensure invariant reducers == options.shards * (fanout ^ N) where N is an integer >= 1.
    // N is the number of mtree merge iterations.
    // This helps to evenly spread docs among root shards and simplifies the impl of the mtree merge algorithm.
    int s = options.shards;
    while (s < reducers) {
      s = s * options.fanout;
    }
    reducers = s;
    assert reducers % options.fanout == 0;
  }
  options.reducers = reducers;
}

/**
 * Writes the list of files to index (one HDFS URI per line) into fullInputList, combining
 * the --input-files trees (recursively, skipping dot-files) and the --input-list files
 * ("-" means stdin). Returns the number of lines written.
 */
private long addInputFiles(List<Path> inputFiles, List<Path> inputLists, Path fullInputList, Configuration conf)
    throws IOException {
  long numFiles = 0;
  FileSystem fs = fullInputList.getFileSystem(conf);
  FSDataOutputStream out = fs.create(fullInputList);
  try {
    Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));

    for (Path inputFile : inputFiles) {
      FileSystem inputFileFs = inputFile.getFileSystem(conf);
      if (inputFileFs.exists(inputFile)) {
        PathFilter pathFilter = new PathFilter() {
          @Override
          public boolean accept(Path path) {
            return !path.getName().startsWith("."); // ignore "hidden" files and dirs
          }
        };
        numFiles += addInputFilesRecursively(inputFile, writer, inputFileFs, pathFilter);
      }
    }

    for (Path inputList : inputLists) {
      InputStream in;
      if (inputList.toString().equals("-")) {
        in = System.in;
      } else if (inputList.isAbsoluteAndSchemeAuthorityNull()) {
        // plain local path without scheme: read from the local file system
        in = new BufferedInputStream(new FileInputStream(inputList.toString()));
      } else {
        in = inputList.getFileSystem(conf).open(inputList);
      }
      try {
        BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
        String line;
        while ((line = reader.readLine()) != null) {
          writer.write(line + "\n");
          numFiles++;
        }
        reader.close();
      } finally {
        in.close();
      }
    }
    writer.close();
  } finally {
    out.close();
  }
  return numFiles;
}

/**
 * Add the specified file to the input set, if path
is a directory then
 * add the files contained therein.
 */
private long addInputFilesRecursively(Path path, Writer writer, FileSystem fs, PathFilter pathFilter)
    throws IOException {
  long numFiles = 0;
  for (FileStatus stat : fs.listStatus(path, pathFilter)) {
    LOG.debug("Adding path {}", stat.getPath());
    if (stat.isDirectory()) {
      // recurse into subdirectory
      numFiles += addInputFilesRecursively(stat.getPath(), writer, fs, pathFilter);
    } else {
      writer.write(stat.getPath().toString() + "\n");
      numFiles++;
    }
  }
  return numFiles;
}

/**
 * Shuffles the input list entirely in driver memory (no MR job) and writes the result
 * to outputStep2Dir/FULL_INPUT_LIST. Used when there are few input files.
 */
private void randomizeFewInputFiles(FileSystem fs, Path outputStep2Dir, Path fullInputList) throws IOException {
  List<String> lines = new ArrayList();
  BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(fullInputList), "UTF-8"));
  try {
    String line;
    while ((line = reader.readLine()) != null) {
      lines.add(line);
    }
  } finally {
    reader.close();
  }

  Collections.shuffle(lines, new Random(421439783L)); // constant seed for reproducability

  FSDataOutputStream out = fs.create(new Path(outputStep2Dir, FULL_INPUT_LIST));
  Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
  try {
    for (String line : lines) {
      writer.write(line + "\n");
    }
  } finally {
    writer.close();
  }
}

/**
 * To uniformly spread load across all mappers we randomize fullInputList
 * with a separate small Mapper & Reducer preprocessing step. This way
 * each input line ends up on a random position in the output file list.
 * Each mapper indexes a disjoint consecutive set of files such that each
 * set has roughly the same size, at least from a probabilistic
 * perspective.
 *
 * For example an input file with the following input list of URLs:
 *
 * A
 * B
 * C
 * D
 *
 * might be randomized into the following output list of URLs:
 *
 * C
 * A
 * D
 * B
 *
 * The implementation sorts the list of lines by randomly generated numbers.
 */
private Job randomizeManyInputFiles(Configuration baseConfig, Path fullInputList, Path outputStep2Dir,
    int numLinesPerSplit) throws IOException {
  // Note: this job is configured but not submitted here; the caller submits and waits.
  Job job2 = Job.getInstance(baseConfig);
  job2.setJarByClass(getClass());
  job2.setJobName(getClass().getName() + "/" + Utils.getShortClassName(LineRandomizerMapper.class));
  job2.setInputFormatClass(NLineInputFormat.class);
  NLineInputFormat.addInputPath(job2, fullInputList);
  NLineInputFormat.setNumLinesPerSplit(job2, numLinesPerSplit);
  job2.setMapperClass(LineRandomizerMapper.class);
  job2.setReducerClass(LineRandomizerReducer.class);
  job2.setOutputFormatClass(TextOutputFormat.class);
  FileOutputFormat.setOutputPath(job2, outputStep2Dir);
  // single reducer so the output is one totally-ordered (randomized) file list
  job2.setNumReduceTasks(1);
  job2.setOutputKeyClass(LongWritable.class);
  job2.setOutputValueClass(Text.class);
  return job2;
}

// do the same as if the user had typed 'hadoop ... --files <file>'
public static void addDistributedCacheFile(File file, Configuration conf) throws IOException {
  String HADOOP_TMP_FILES = "tmpfiles"; // see Hadoop's GenericOptionsParser
  String tmpFiles = conf.get(HADOOP_TMP_FILES, "");
  if (tmpFiles.length() > 0) { // already present?
tmpFiles = tmpFiles + ",";
  }
  // Let Hadoop's own option parser build the canonical "tmpfiles" value for this file,
  // then append it to whatever was already configured.
  GenericOptionsParser parser = new GenericOptionsParser(
      new Configuration(conf), new String[]{"--files", file.getCanonicalPath()});
  String additionalTmpFiles = parser.getConfiguration().get(HADOOP_TMP_FILES);
  assert additionalTmpFiles != null;
  assert additionalTmpFiles.length() > 0;
  tmpFiles += additionalTmpFiles;
  conf.set(HADOOP_TMP_FILES, tmpFiles);
}

/**
 * Writes one line per reducer output shard index dir ("shard/data/index") into fullInputList,
 * in alphanumeric shard order; returns the number of lines written.
 */
private static int createTreeMergeInputDirList(Job job, Path outputReduceDir, FileSystem fs, Path fullInputList)
    throws FileNotFoundException, IOException {
  FileStatus[] dirs = listSortedOutputShardDirs(job, outputReduceDir, fs);
  int numFiles = 0;
  FSDataOutputStream out = fs.create(fullInputList);
  try {
    Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
    for (FileStatus stat : dirs) {
      LOG.debug("Adding path {}", stat.getPath());
      Path dir = new Path(stat.getPath(), "data/index");
      if (!fs.isDirectory(dir)) {
        throw new IllegalStateException("Not a directory: " + dir);
      }
      writer.write(dir.toString() + "\n");
      numFiles++;
    }
    writer.close();
  } finally {
    out.close();
  }
  return numFiles;
}

/** Lists the shard output dirs under outputReduceDir, validated and alphanumerically sorted. */
private static FileStatus[] listSortedOutputShardDirs(Job job, Path outputReduceDir, FileSystem fs)
    throws FileNotFoundException, IOException {
  final String dirPrefix = SolrOutputFormat.getOutputName(job);
  FileStatus[] dirs = fs.listStatus(outputReduceDir, new PathFilter() {
    @Override
    public boolean accept(Path path) {
      return path.getName().startsWith(dirPrefix);
    }
  });
  for (FileStatus dir : dirs) {
    if (!dir.isDirectory()) {
      throw new IllegalStateException("Not a directory: " + dir.getPath());
    }
  }
  // use alphanumeric sort (rather than lexicographical sort) to properly handle more than 99999 shards
  Arrays.sort(dirs, new Comparator<FileStatus>() {
    @Override
    public int compare(FileStatus f1, FileStatus f2) {
      return new ForkedAlphaNumericComparator().compare(f1.getPath().getName(), f2.getPath().getName());
    }
  });
  return dirs;
}

/*
 * You can run MapReduceIndexerTool in Solrcloud mode, and once the
MR job completes, you can use * the standard solrj Solrcloud API to send doc updates and deletes to SolrCloud, and those updates * and deletes will go to the right Solr shards, and it will work just fine. * * The MapReduce framework doesn't guarantee that input split N goes to the map task with the * taskId = N. The job tracker and Yarn schedule and assign tasks, considering data locality * aspects, but without regard of the input split# withing the overall list of input splits. In * other words, split# != taskId can be true. * * To deal with this issue, our mapper tasks write a little auxiliary meta data file (per task) * that tells the job driver which taskId processed which split#. Once the mapper-only job is * completed, the job driver renames the output dirs such that the dir name contains the true solr * shard id, based on these auxiliary files. * * This way each doc gets assigned to the right Solr shard even with #reducers > #solrshards * * Example for a merge with two shards: * * part-m-00000 and part-m-00001 goes to outputShardNum = 0 and will end up in merged part-m-00000 * part-m-00002 and part-m-00003 goes to outputShardNum = 1 and will end up in merged part-m-00001 * part-m-00004 and part-m-00005 goes to outputShardNum = 2 and will end up in merged part-m-00002 * ... 
and so on
   *
   * Also see run() method above where it uses NLineInputFormat.setNumLinesPerSplit(job,
   * options.fanout)
   *
   * Also see TreeMergeOutputFormat.TreeMergeRecordWriter.writeShardNumberFile()
   */
  private static boolean renameTreeMergeShardDirs(Path outputTreeMergeStep, Job job, FileSystem fs) throws IOException {
    final String dirPrefix = SolrOutputFormat.getOutputName(job);
    FileStatus[] dirs = fs.listStatus(outputTreeMergeStep, new PathFilter() {
      @Override
      public boolean accept(Path path) {
        return path.getName().startsWith(dirPrefix);
      }
    });

    for (FileStatus dir : dirs) {
      if (!dir.isDirectory()) {
        throw new IllegalStateException("Not a directory: " + dir.getPath());
      }
    }

    // Pass 1: move all shard dirs to a temporary "_"-prefixed name so that the final renames
    // below cannot collide with a not-yet-renamed sibling.
    for (FileStatus dir : dirs) {
      Path path = dir.getPath();
      Path renamedPath = new Path(path.getParent(), "_" + path.getName());
      if (!rename(path, renamedPath, fs)) {
        return false;
      }
    }

    // Pass 2: read the true solr shard number recorded by the mapper task (see
    // TreeMergeOutputFormat.TreeMergeRecordWriter.writeShardNumberFile()) and rename each
    // temporary dir to its final name based on that number.
    for (FileStatus dir : dirs) {
      Path path = dir.getPath();
      Path renamedPath = new Path(path.getParent(), "_" + path.getName());

      Path solrShardNumberFile = new Path(renamedPath, ForkedTreeMergeMapper.SOLR_SHARD_NUMBER);
      InputStream in = fs.open(solrShardNumberFile);
      byte[] bytes;
      try {
        // close the stream even if the read throws (previously it leaked on failure)
        bytes = ByteStreams.toByteArray(in);
      } finally {
        in.close();
      }
      Preconditions.checkArgument(bytes.length > 0);
      int solrShard = Integer.parseInt(new String(bytes, Charsets.UTF_8));
      if (!delete(solrShardNumberFile, false, fs)) {
        return false;
      }

      // see FileOutputFormat.NUMBER_FORMAT
      NumberFormat numberFormat = NumberFormat.getInstance();
      numberFormat.setMinimumIntegerDigits(5);
      numberFormat.setGroupingUsed(false);
      Path finalPath = new Path(renamedPath.getParent(), dirPrefix + "-m-" + numberFormat.format(solrShard));

      LOG.info("MTree merge renaming solr shard: " + solrShard + " from dir: " + dir.getPath() + " to dir: " + finalPath);
      if (!rename(renamedPath, finalPath, fs)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Validates the mutually-dependent cluster/go-live CLI options; throws an
   * {@link ArgumentParserException} describing the first violation found.
   * On success, derives {@code opts.shards} from {@code opts.shardUrls} when URLs were given.
   */
  public static void verifyGoLiveArgs(Options opts, ArgumentParser parser) throws ArgumentParserException {
    if (opts.zkHost == null && opts.solrHomeDir == null) {
      throw new ArgumentParserException("At least one of --zk-host or --solr-home-dir is required", parser);
    }
    if (opts.goLive && opts.zkHost == null && opts.shardUrls == null) {
      throw new ArgumentParserException("--go-live requires that you also pass --shard-url or --zk-host", parser);
    }

    if (opts.zkHost != null && opts.collection == null) {
      throw new ArgumentParserException("--zk-host requires that you also pass --collection", parser);
    }

    if (opts.zkHost != null) {
      return;
      // verify structure of ZK directory later, to avoid checking run-time errors during parsing.
    } else if (opts.shardUrls != null) {
      if (opts.shardUrls.size() == 0) {
        throw new ArgumentParserException("--shard-url requires at least one URL", parser);
      }
    } else if (opts.shards != null) {
      if (opts.shards <= 0) {
        throw new ArgumentParserException("--shards must be a positive number: " + opts.shards, parser);
      }
    } else {
      throw new ArgumentParserException("You must specify one of the following (mutually exclusive) arguments: "
          + "--zk-host or --shard-url or --shards", parser);
    }

    if (opts.shardUrls != null) {
      opts.shards = opts.shardUrls.size();
    }

    assert opts.shards != null;
    assert opts.shards > 0;
  }

  /**
   * Queries ZooKeeper (when --zk-host was given) for the SolrCloud shard URLs of
   * {@code opts.collection}, populating {@code opts.shardUrls} and {@code opts.shards}.
   * No-op when {@code opts.zkHost} is null.
   */
  private static void verifyZKStructure(Options opts, ArgumentParser parser) throws ArgumentParserException {
    if (opts.zkHost != null) {
      assert opts.collection != null;
      ForkedZooKeeperInspector zki = new ForkedZooKeeperInspector();
      try {
        opts.shardUrls = zki.extractShardUrls(opts.zkHost, opts.collection);
      } catch (Exception e) {
        LOG.debug("Cannot extract SolrCloud shard URLs from ZooKeeper", e);
        throw new ArgumentParserException(e, parser);
      }
      assert opts.shardUrls != null;
      if (opts.shardUrls.size() == 0) {
        throw new ArgumentParserException("--zk-host requires ZooKeeper " + opts.zkHost
            + " to contain at least one SolrCore for collection: " + opts.collection, parser);
      }
      opts.shards = opts.shardUrls.size();
      LOG.debug("Using SolrCloud shard URLs: {}", opts.shardUrls);
    }
  }

  /** Runs the job to completion, logging an error when it fails; returns the success flag. */
  public static boolean waitForCompletion(Job job, boolean isVerbose)
      throws IOException, InterruptedException, ClassNotFoundException {

    LOG.debug("Running job: " + getJobInfo(job));
    boolean success = job.waitForCompletion(isVerbose);
    if (!success) {
      LOG.error("Job failed! " + getJobInfo(job));
    }
    return success;
  }

  /** Logs a final success message including the total elapsed wall-clock time. */
  public static void goodbye(Job job, long startTime) {
    float secs = (System.currentTimeMillis() - startTime) / 1000.0f;
    if (job != null) {
      LOG.info("Succeeded with job: " + getJobInfo(job));
    }
    LOG.info("Success. Done. Program took {} secs. Goodbye.", secs);
  }

  /** Returns a short human-readable identification of the given job for log messages. */
  private static String getJobInfo(Job job) {
    return "jobName: " + job.getJobName() + ", jobId: " + job.getJobID();
  }

  /** Renames {@code src} to {@code dst}, logging an error on failure; returns the success flag. */
  private static boolean rename(Path src, Path dst, FileSystem fs) throws IOException {
    boolean success = fs.rename(src, dst);
    if (!success) {
      LOG.error("Cannot rename " + src + " to " + dst);
    }
    return success;
  }

  /** Deletes {@code path}, logging an error on failure; returns the success flag. */
  private static boolean delete(Path path, boolean recursive, FileSystem fs) throws IOException {
    boolean success = fs.delete(path, recursive);
    if (!success) {
      LOG.error("Cannot delete " + path);
    }
    return success;
  }

  // same as IntMath.divide(p, q, RoundingMode.CEILING); assumes p >= 0 and q > 0
  private long ceilDivide(long p, long q) {
    long result = p / q;
    if (p % q != 0) {
      result++;
    }
    return result;
  }

  /**
   * Returns <tt>log<sub>base</sub>value</tt>.
   */
  private static double log(double base, double value) {
    return Math.log(value) / Math.log(base);
  }

}
hbase-indexer-mr/src/main/java/org/apache/solr/hadoop/ForkedMapReduceIndexerTool.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.hadoop; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Writer; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Random; import org.kitesdk.morphline.base.Fields; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import com.google.common.io.ByteStreams; import com.ngdata.hbaseindexer.mr.JobProcessCallback; import com.ngdata.hbaseindexer.mr.NopJobProcessCallback; import net.sourceforge.argparse4j.ArgumentParsers; import net.sourceforge.argparse4j.impl.Arguments; import net.sourceforge.argparse4j.impl.action.HelpArgumentAction; import net.sourceforge.argparse4j.impl.choice.RangeArgumentChoice; import net.sourceforge.argparse4j.impl.type.FileArgumentType; import 
net.sourceforge.argparse4j.inf.Argument; import net.sourceforge.argparse4j.inf.ArgumentGroup; import net.sourceforge.argparse4j.inf.ArgumentParser; import net.sourceforge.argparse4j.inf.ArgumentParserException; import net.sourceforge.argparse4j.inf.FeatureControl; import net.sourceforge.argparse4j.inf.Namespace; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.log4j.PropertyConfigurator; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.hadoop.MapReduceIndexerTool.Options; import org.apache.solr.hadoop.dedup.RetainMostRecentUpdateConflictResolver; import org.apache.solr.hadoop.morphline.MorphlineMapRunner; import org.apache.solr.hadoop.morphline.MorphlineMapper; import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * <b>NOTE:</b> This forked implementation is only here temporarily, in order to facilitate further * development of the HBaseMapReduceIndexerTool. Once the core MapReduceIndexerTool is updated to * allow better reusability from outside tools, this class should be removed. 
* * * Public API for a MapReduce batch job driver that creates a set of Solr index shards from a set of * input files and writes the indexes into HDFS, in a flexible, scalable and fault-tolerant manner. * Also supports merging the output shards into a set of live customer facing Solr servers, * typically a SolrCloud. */ public class ForkedMapReduceIndexerTool extends Configured implements Tool { Job job; // visible for testing only public static final String RESULTS_DIR = "results"; static final String MAIN_MEMORY_RANDOMIZATION_THRESHOLD = ForkedMapReduceIndexerTool.class.getName() + ".mainMemoryRandomizationThreshold"; private static final String FULL_INPUT_LIST = "full-input-list.txt"; private static final Logger LOG = LoggerFactory.getLogger(ForkedMapReduceIndexerTool.class); /** * See http://argparse4j.sourceforge.net and for details see http://argparse4j.sourceforge.net/usage.html */ static final class MyArgumentParser { /** * Parses the given command line arguments. * * @return exitCode null indicates the caller shall proceed with processing, * non-null indicates the caller shall exit the program with the * given exit status code. */ public Integer parseArgs(String[] args, Configuration conf, Options opts) { assert args != null; assert conf != null; assert opts != null; if (args.length == 0) { args = new String[]{"--help"}; } ArgumentParser parser = ArgumentParsers .newArgumentParser( "hadoop [GenericOptions]... jar search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName(), false) .defaultHelp(true) .description( "MapReduce batch job driver that takes a morphline and creates a set of Solr index shards from a set of input files " + "and writes the indexes into HDFS, in a flexible, scalable and fault-tolerant manner. " + "It also supports merging the output shards into a set of live customer facing Solr servers, " + "typically a SolrCloud. 
The program proceeds in several consecutive MapReduce based phases, as follows:" + "\n\n" + "1) Randomization phase: This (parallel) phase randomizes the list of input files in order to spread " + "indexing load more evenly among the mappers of the subsequent phase." + "\n\n" + "2) Mapper phase: This (parallel) phase takes the input files, extracts the relevant content, transforms it " + "and hands SolrInputDocuments to a set of reducers. " + "The ETL functionality is flexible and " + "customizable using chains of arbitrary morphline commands that pipe records from one transformation command to another. " + "Commands to parse and transform a set of standard data formats such as Avro, CSV, Text, HTML, XML, " + "PDF, Word, Excel, etc. are provided out of the box, and additional custom commands and parsers for additional " + "file or data formats can be added as morphline plugins. " + "This is done by implementing a simple Java interface that consumes a record (e.g. a file in the form of an InputStream " + "plus some headers plus contextual metadata) and generates as output zero or more records. " + "Any kind of data format can be indexed and any Solr documents for any kind of Solr schema can be generated, " + "and any custom ETL logic can be registered and executed.\n" + "Record fields, including MIME types, can also explicitly be passed by force from the CLI to the morphline, for example: " + "hadoop ... -D " + MorphlineMapRunner.MORPHLINE_FIELD_PREFIX + Fields.ATTACHMENT_MIME_TYPE + "=text/csv" + "\n\n" + "3) Reducer phase: This (parallel) phase loads the mapper's SolrInputDocuments into one EmbeddedSolrServer per reducer. " + "Each such reducer and Solr server can be seen as a (micro) shard. The Solr servers store their " + "data in HDFS." + "\n\n" + "4) Mapper-only merge phase: This (parallel) phase merges the set of reducer shards into the number of solr " + "shards expected by the user, using a mapper-only job. 
This phase is omitted if the number " + "of shards is already equal to the number of shards expected by the user. " + "\n\n" + "5) Go-live phase: This optional (parallel) phase merges the output shards of the previous phase into a set of " + "live customer facing Solr servers, typically a SolrCloud. " + "If this phase is omitted you can explicitly point each Solr server to one of the HDFS output shard directories." + "\n\n" + "Fault Tolerance: Mapper and reducer task attempts are retried on failure per the standard MapReduce semantics. " + "On program startup all data in the --output-dir is deleted if that output directory already exists. " + "If the whole job fails you can retry simply by rerunning the program again using the same arguments." ); parser.addArgument("--help", "-help", "-h") .help("Show this help message and exit") .action(new HelpArgumentAction() { @Override public void run(ArgumentParser parser, Argument arg, Map<String, Object> attrs, String flag, Object value) throws ArgumentParserException { parser.printHelp(new PrintWriter(System.out)); System.out.println(); System.out.print(ForkedToolRunnerHelpFormatter.getGenericCommandUsage()); //ToolRunner.printGenericCommandUsage(System.out); System.out.println( "Examples: \n\n" + "# (Re)index an Avro based Twitter tweet file:\n" + "sudo -u hdfs hadoop \\\n" + " --config /etc/hadoop/conf.cloudera.mapreduce1 \\\n" + " jar target/search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName() + " \\\n" + " -D 'mapred.child.java.opts=-Xmx500m' \\\n" + // " -D 'mapreduce.child.java.opts=-Xmx500m' \\\n" + " --log4j src/test/resources/log4j.properties \\\n" + " --morphline-file ../search-core/src/test/resources/test-morphlines/tutorialReadAvroContainer.conf \\\n" + " --solr-home-dir src/test/resources/solr/minimr \\\n" + " --output-dir hdfs://c2202.mycompany.com/user/$USER/test \\\n" + " --shards 1 \\\n" + " hdfs:///user/$USER/test-documents/sample-statuses-20120906-141433.avro\n" + "\n" + "# (Re)index all 
files that match all of the following conditions:\n" + "# 1) File is contained in dir tree hdfs:///user/$USER/solrloadtest/twitter/tweets\n" + "# 2) file name matches the glob pattern 'sample-statuses*.gz'\n" + "# 3) file was last modified less than 100000 minutes ago\n" + "# 4) file size is between 1 MB and 1 GB\n" + "# Also include extra library jar file containing JSON tweet Java parser:\n" + "hadoop jar target/search-mr-*-job.jar " + HdfsFindTool.class.getName() + " \\\n" + " -find hdfs:///user/$USER/solrloadtest/twitter/tweets \\\n" + " -type f \\\n" + " -name 'sample-statuses*.gz' \\\n" + " -mmin -1000000 \\\n" + " -size -100000000c \\\n" + " -size +1000000c \\\n" + "| sudo -u hdfs hadoop \\\n" + " --config /etc/hadoop/conf.cloudera.mapreduce1 \\\n" + " jar target/search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName() + " \\\n" + " -D 'mapred.child.java.opts=-Xmx500m' \\\n" + // " -D 'mapreduce.child.java.opts=-Xmx500m' \\\n" + " --log4j src/test/resources/log4j.properties \\\n" + " --morphline-file ../search-core/src/test/resources/test-morphlines/tutorialReadJsonTestTweets.conf \\\n" + " --solr-home-dir src/test/resources/solr/minimr \\\n" + " --output-dir hdfs://c2202.mycompany.com/user/$USER/test \\\n" + " --shards 100 \\\n" + " --input-list -\n" + "\n" + "# Go live by merging resulting index shards into a live Solr cluster\n" + "# (explicitly specify Solr URLs - for a SolrCloud cluster see next example):\n" + "sudo -u hdfs hadoop \\\n" + " --config /etc/hadoop/conf.cloudera.mapreduce1 \\\n" + " jar target/search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName() + " \\\n" + " -D 'mapred.child.java.opts=-Xmx500m' \\\n" + // " -D 'mapreduce.child.java.opts=-Xmx500m' \\\n" + " --log4j src/test/resources/log4j.properties \\\n" + " --morphline-file ../search-core/src/test/resources/test-morphlines/tutorialReadAvroContainer.conf \\\n" + " --solr-home-dir src/test/resources/solr/minimr \\\n" + " --output-dir 
hdfs://c2202.mycompany.com/user/$USER/test \\\n" + " --shard-url http://solr001.mycompany.com:8983/solr/collection1 \\\n" + " --shard-url http://solr002.mycompany.com:8983/solr/collection1 \\\n" + " --go-live \\\n" + " hdfs:///user/foo/indir\n" + "\n" + "# Go live by merging resulting index shards into a live SolrCloud cluster\n" + "# (discover shards and Solr URLs through ZooKeeper):\n" + "sudo -u hdfs hadoop \\\n" + " --config /etc/hadoop/conf.cloudera.mapreduce1 \\\n" + " jar target/search-mr-*-job.jar " + ForkedMapReduceIndexerTool.class.getName() + " \\\n" + " -D 'mapred.child.java.opts=-Xmx500m' \\\n" + // " -D 'mapreduce.child.java.opts=-Xmx500m' \\\n" + " --log4j src/test/resources/log4j.properties \\\n" + " --morphline-file ../search-core/src/test/resources/test-morphlines/tutorialReadAvroContainer.conf \\\n" + " --output-dir hdfs://c2202.mycompany.com/user/$USER/test \\\n" + " --zk-host zk01.mycompany.com:2181/solr \\\n" + " --collection collection1 \\\n" + " --go-live \\\n" + " hdfs:///user/foo/indir\n" ); throw new FoundHelpArgument(); // Trick to prevent processing of any remaining arguments } }); ArgumentGroup requiredGroup = parser.addArgumentGroup("Required arguments"); Argument outputDirArg = requiredGroup.addArgument("--output-dir") .metavar("HDFS_URI") .type(new PathArgumentType(conf) { @Override public Path convert(ArgumentParser parser, Argument arg, String value) throws ArgumentParserException { Path path = super.convert(parser, arg, value); if ("hdfs".equals(path.toUri().getScheme()) && path.toUri().getAuthority() == null) { // TODO: consider defaulting to hadoop's fs.default.name here or in SolrRecordWriter.createEmbeddedSolrServer() throw new ArgumentParserException("Missing authority in path URI: " + path, parser); } return path; } }.verifyHasScheme().verifyIsAbsolute().verifyCanWriteParent()) .required(true) .help("HDFS directory to write Solr indexes to. Inside there one output directory per shard will be generated. 
" + "Example: hdfs://c2202.mycompany.com/user/$USER/test"); Argument inputListArg = parser.addArgument("--input-list") .action(Arguments.append()) .metavar("URI") // .type(new PathArgumentType(fs).verifyExists().verifyCanRead()) .type(Path.class) .help("Local URI or HDFS URI of a UTF-8 encoded file containing a list of HDFS URIs to index, " + "one URI per line in the file. If '-' is specified, URIs are read from the standard input. " + "Multiple --input-list arguments can be specified."); Argument morphlineFileArg = requiredGroup.addArgument("--morphline-file") .metavar("FILE") .type(new FileArgumentType().verifyExists().verifyIsFile().verifyCanRead()) .required(true) .help("Relative or absolute path to a local config file that contains one or more morphlines. " + "The file must be UTF-8 encoded. Example: /path/to/morphline.conf"); Argument morphlineIdArg = parser.addArgument("--morphline-id") .metavar("STRING") .type(String.class) .help("The identifier of the morphline that shall be executed within the morphline config file " + "specified by --morphline-file. If the --morphline-id option is ommitted the first (i.e. " + "top-most) morphline within the config file is used. Example: morphline1"); Argument solrHomeDirArg = parser.addArgument("--solr-home-dir") .metavar("DIR") .type(new FileArgumentType() { @Override public File convert(ArgumentParser parser, Argument arg, String value) throws ArgumentParserException { File solrHomeDir = super.convert(parser, arg, value); File solrConfigFile = new File(new File(solrHomeDir, "conf"), "solrconfig.xml"); new FileArgumentType().verifyExists().verifyIsFile().verifyCanRead().convert( parser, arg, solrConfigFile.getPath()); return solrHomeDir; } }.verifyIsDirectory().verifyCanRead()) .required(false) .help("Relative or absolute path to a local dir containing Solr conf/ dir and in particular " + "conf/solrconfig.xml and optionally also lib/ dir. This directory will be uploaded to each MR task. 
" + "Example: src/test/resources/solr/minimr"); Argument updateConflictResolverArg = parser.addArgument("--update-conflict-resolver") .metavar("FQCN") .type(String.class) .setDefault(RetainMostRecentUpdateConflictResolver.class.getName()) .help("Fully qualified class name of a Java class that implements the UpdateConflictResolver interface. " + "This enables deduplication and ordering of a series of document updates for the same unique document " + "key. For example, a MapReduce batch job might index multiple files in the same job where some of the " + "files contain old and new versions of the very same document, using the same unique document key.\n" + "Typically, implementations of this interface forbid collisions by throwing an exception, or ignore all but " + "the most recent document version, or, in the general case, order colliding updates ascending from least " + "recent to most recent (partial) update. The caller of this interface (i.e. the Hadoop Reducer) will then " + "apply the updates to Solr in the order returned by the orderUpdates() method.\n" + "The default RetainMostRecentUpdateConflictResolver implementation ignores all but the most recent document " + "version, based on a configurable numeric Solr field, which defaults to the file_last_modified timestamp"); Argument mappersArg = parser.addArgument("--mappers") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(-1, Integer.MAX_VALUE)) // TODO: also support X% syntax where X is an integer .setDefault(-1) .help("Tuning knob that indicates the maximum number of MR mapper tasks to use. -1 indicates use all map slots " + "available on the cluster."); Argument reducersArg = parser.addArgument("--reducers") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(-1, Integer.MAX_VALUE)) // TODO: also support X% syntax where X is an integer .setDefault(-1) .help("Tuning knob that indicates the number of reducers to index into. 
" + "-1 indicates use all reduce slots available on the cluster. " + "0 indicates use one reducer per output shard, which disables the mtree merge MR algorithm. " + "The mtree merge MR algorithm improves scalability by spreading load " + "(in particular CPU load) among a number of parallel reducers that can be much larger than the number " + "of solr shards expected by the user. It can be seen as an extension of concurrent lucene merges " + "and tiered lucene merges to the clustered case. The subsequent mapper-only phase " + "merges the output of said large number of reducers to the number of shards expected by the user, " + "again by utilizing more available parallelism on the cluster."); Argument fanoutArg = parser.addArgument("--fanout") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(2, Integer.MAX_VALUE)) .setDefault(Integer.MAX_VALUE) .help(FeatureControl.SUPPRESS); Argument maxSegmentsArg = parser.addArgument("--max-segments") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(1, Integer.MAX_VALUE)) .setDefault(1) .help("Tuning knob that indicates the maximum number of segments to be contained on output in the index of " + "each reducer shard. After a reducer has built its output index it applies a merge policy to merge segments " + "until there are <= maxSegments lucene segments left in this index. " + "Merging segments involves reading and rewriting all data in all these segment files, " + "potentially multiple times, which is very I/O intensive and time consuming. " + "However, an index with fewer segments can later be merged faster, " + "and it can later be queried faster once deployed to a live Solr serving shard. " + "Set maxSegments to 1 to optimize the index for low query latency. " + "In a nutshell, a small maxSegments value trades indexing latency for subsequently improved query latency. 
" + "This can be a reasonable trade-off for batch indexing systems."); Argument fairSchedulerPoolArg = parser.addArgument("--fair-scheduler-pool") .metavar("STRING") .help("Optional tuning knob that indicates the name of the fair scheduler pool to submit jobs to. " + "The Fair Scheduler is a pluggable MapReduce scheduler that provides a way to share large clusters. " + "Fair scheduling is a method of assigning resources to jobs such that all jobs get, on average, an " + "equal share of resources over time. When there is a single job running, that job uses the entire " + "cluster. When other jobs are submitted, tasks slots that free up are assigned to the new jobs, so " + "that each job gets roughly the same amount of CPU time. Unlike the default Hadoop scheduler, which " + "forms a queue of jobs, this lets short jobs finish in reasonable time while not starving long jobs. " + "It is also an easy way to share a cluster between multiple of users. Fair sharing can also work with " + "job priorities - the priorities are used as weights to determine the fraction of total compute time " + "that each job gets."); Argument dryRunArg = parser.addArgument("--dry-run") .action(Arguments.storeTrue()) .help("Run in local mode and print documents to stdout instead of loading them into Solr. This executes " + "the morphline in the client process (without submitting a job to MR) for quicker turnaround during " + "early trial & debug sessions."); Argument log4jConfigFileArg = parser.addArgument("--log4j") .metavar("FILE") .type(new FileArgumentType().verifyExists().verifyIsFile().verifyCanRead()) .help("Relative or absolute path to a log4j.properties config file on the local file system. This file " + "will be uploaded to each MR task. 
Example: /path/to/log4j.properties"); Argument verboseArg = parser.addArgument("--verbose", "-v") .action(Arguments.storeTrue()) .help("Turn on verbose output."); ArgumentGroup clusterInfoGroup = parser .addArgumentGroup("Cluster arguments") .description( "Arguments that provide information about your Solr cluster. " + "If you are not using --go-live, pass the --shards argument. If you are building shards for " + "a Non-SolrCloud cluster, pass the --shard-url argument one or more times. To build indexes for" + " a replicated cluster with --shard-url, pass replica urls consecutively and also pass --shards. " + "If you are building shards for a SolrCloud cluster, pass the --zk-host argument. " + "Using --go-live requires either --shard-url or --zk-host."); Argument shardUrlsArg = clusterInfoGroup.addArgument("--shard-url") .metavar("URL") .type(String.class) .action(Arguments.append()) .help("Solr URL to merge resulting shard into if using --go-live. " + "Example: http://solr001.mycompany.com:8983/solr/collection1. " + "Multiple --shard-url arguments can be specified, one for each desired shard. " + "If you are merging shards into a SolrCloud cluster, use --zk-host instead."); Argument zkHostArg = clusterInfoGroup.addArgument("--zk-host") .metavar("STRING") .type(String.class) .help("The address of a ZooKeeper ensemble being used by a SolrCloud cluster. " + "This ZooKeeper ensemble will be examined to determine the number of output " + "shards to create as well as the Solr URLs to merge the output shards into when using the --go-live option. " + "Requires that you also pass the --collection to merge the shards into.\n" + "\n" + "The --zk-host option implements the same partitioning semantics as the standard SolrCloud " + "Near-Real-Time (NRT) API. 
This enables to mix batch updates from MapReduce ingestion with " + "updates from standard Solr NRT ingestion on the same SolrCloud cluster, " + "using identical unique document keys.\n" + "\n" + "Format is: a list of comma separated host:port pairs, each corresponding to a zk " + "server. Example: '127.0.0.1:2181,127.0.0.1:2182,127.0.0.1:2183' If " + "the optional chroot suffix is used the example would look " + "like: '127.0.0.1:2181/solr,127.0.0.1:2182/solr,127.0.0.1:2183/solr' " + "where the client would be rooted at '/solr' and all paths " + "would be relative to this root - i.e. getting/setting/etc... " + "'/foo/bar' would result in operations being run on " + "'/solr/foo/bar' (from the server perspective).\n" + "\n" + "If --solr-home-dir is not specified, the Solr home directory for the collection " + "will be downloaded from this ZooKeeper ensemble."); Argument shardsArg = clusterInfoGroup.addArgument("--shards") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(1, Integer.MAX_VALUE)) .help("Number of output shards to generate."); ArgumentGroup goLiveGroup = parser.addArgumentGroup("Go live arguments") .description("Arguments for merging the shards that are built into a live Solr cluster. " + "Also see the Cluster arguments."); Argument goLiveArg = goLiveGroup.addArgument("--go-live") .action(Arguments.storeTrue()) .help("Allows you to optionally merge the final index shards into a live Solr cluster after they are built. " + "You can pass the ZooKeeper address with --zk-host and the relevant cluster information will be auto detected. " + "If you are not using a SolrCloud cluster, --shard-url arguments can be used to specify each SolrCore to merge " + "each shard into."); Argument collectionArg = goLiveGroup.addArgument("--collection") .metavar("STRING") .help("The SolrCloud collection to merge shards into when using --go-live and --zk-host. 
Example: collection1"); Argument goLiveThreadsArg = goLiveGroup.addArgument("--go-live-threads") .metavar("INTEGER") .type(Integer.class) .choices(new RangeArgumentChoice(1, Integer.MAX_VALUE)) .setDefault(1000) .help("Tuning knob that indicates the maximum number of live merges to run in parallel at one time."); // trailing positional arguments Argument inputFilesArg = parser.addArgument("input-files") .metavar("HDFS_URI") .type(new PathArgumentType(conf).verifyHasScheme().verifyExists().verifyCanRead()) .nargs("*") .setDefault() .help("HDFS URI of file or directory tree to index."); Namespace ns; try { ns = parser.parseArgs(args); } catch (FoundHelpArgument e) { return 0; } catch (ArgumentParserException e) { parser.handleError(e); return 1; } opts.log4jConfigFile = (File) ns.get(log4jConfigFileArg.getDest()); if (opts.log4jConfigFile != null) { PropertyConfigurator.configure(opts.log4jConfigFile.getPath()); } LOG.debug("Parsed command line args: {}", ns); opts.inputLists = ns.getList(inputListArg.getDest()); if (opts.inputLists == null) { opts.inputLists = Collections.EMPTY_LIST; } opts.inputFiles = ns.getList(inputFilesArg.getDest()); opts.outputDir = (Path) ns.get(outputDirArg.getDest()); opts.mappers = ns.getInt(mappersArg.getDest()); opts.reducers = ns.getInt(reducersArg.getDest()); opts.updateConflictResolver = ns.getString(updateConflictResolverArg.getDest()); opts.fanout = ns.getInt(fanoutArg.getDest()); opts.maxSegments = ns.getInt(maxSegmentsArg.getDest()); opts.morphlineFile = (File) ns.get(morphlineFileArg.getDest()); opts.morphlineId = ns.getString(morphlineIdArg.getDest()); opts.solrHomeDir = (File) ns.get(solrHomeDirArg.getDest()); opts.fairSchedulerPool = ns.getString(fairSchedulerPoolArg.getDest()); opts.isDryRun = ns.getBoolean(dryRunArg.getDest()); opts.isVerbose = ns.getBoolean(verboseArg.getDest()); opts.zkHost = ns.getString(zkHostArg.getDest()); opts.shards = ns.getInt(shardsArg.getDest()); opts.shardUrls = 
buildShardUrls(ns.getList(shardUrlsArg.getDest()), opts.shards); opts.goLive = ns.getBoolean(goLiveArg.getDest()); opts.goLiveThreads = ns.getInt(goLiveThreadsArg.getDest()); opts.collection = ns.getString(collectionArg.getDest()); try { verifyGoLiveArgs(opts, parser); } catch (ArgumentParserException e) { parser.handleError(e); return 1; } if (opts.inputLists.isEmpty() && opts.inputFiles.isEmpty()) { LOG.info("No input files specified - nothing to process"); return 0; // nothing to process } return null; } /** * Marker trick to prevent processing of any remaining arguments once --help option has been parsed */ private static final class FoundHelpArgument extends RuntimeException { } } // END OF INNER CLASS public static List<List<String>> buildShardUrls(List<Object> urls, Integer numShards) { if (urls == null) return null; List<List<String>> shardUrls = new ArrayList<List<String>>(urls.size()); List<String> list = null; int sz; if (numShards == null) { numShards = urls.size(); } sz = (int) Math.ceil(urls.size() / (float) numShards); for (int i = 0; i < urls.size(); i++) { if (i % sz == 0) { list = new ArrayList<String>(); shardUrls.add(list); } list.add((String) urls.get(i)); } return shardUrls; } // TODO Get rid of this, it's just here to get around the fact that // the Options class is not public public static class OptionsBridge { public boolean goLive; public String collection; public String zkHost; public Integer goLiveThreads; public List<List<String>> shardUrls; public List<Path> inputLists; public List<Path> inputFiles; public Path outputDir; public int mappers; public int reducers; public String updateConflictResolver; public int fanout; public Integer shards; public int maxSegments; public File morphlineFile; public String morphlineId; public File solrHomeDir; public String fairSchedulerPool; public boolean isDryRun; public File log4jConfigFile; public boolean isVerbose; public Options asOptions() { Options opts = new Options(); opts.collection = 
this.collection; opts.fairSchedulerPool = this.fairSchedulerPool; opts.fanout = this.fanout; opts.goLive = this.goLive; opts.goLiveThreads = this.goLiveThreads; opts.isDryRun = this.isDryRun; opts.isVerbose = this.isVerbose; opts.log4jConfigFile = this.log4jConfigFile; opts.mappers = this.mappers; opts.maxSegments = this.maxSegments; opts.morphlineFile = this.morphlineFile; opts.morphlineId = this.morphlineId; opts.outputDir = this.outputDir; opts.reducers = this.reducers; opts.shards = this.shards; opts.shardUrls = this.shardUrls; opts.solrHomeDir = this.solrHomeDir; opts.zkHost = this.zkHost; opts.updateConflictResolver = this.updateConflictResolver; try { // This has to go here because the verifyZKStructure method // expects an Options instance verifyZKStructure(opts, null); } catch (ArgumentParserException e) { throw new RuntimeException(e); } return opts; } } // END OF INNER CLASS /** * API for command line clients */ public static void main(String[] args) throws Exception { int res = ToolRunner.run(new Configuration(), new ForkedMapReduceIndexerTool(), args); System.exit(res); } public ForkedMapReduceIndexerTool() { } @Override public int run(String[] args) throws Exception { Options opts = new Options(); Integer exitCode = new MyArgumentParser().parseArgs(args, getConf(), opts); if (exitCode != null) { return exitCode; } return run(opts); } /** * API for Java clients; visible for testing; may become a public API eventually */ int run(Options options) throws Exception { if ("local".equals(getConf().get("mapred.job.tracker"))) { throw new IllegalStateException( "Running with LocalJobRunner (i.e. 
all of Hadoop inside a single JVM) is not supported " + "because LocalJobRunner does not (yet) implement the Hadoop Distributed Cache feature, " + "which is required for passing files via --files and --libjars"); } long programStartTime = System.currentTimeMillis(); if (options.fairSchedulerPool != null) { getConf().set("mapred.fairscheduler.pool", options.fairSchedulerPool); } getConf().setInt(SolrOutputFormat.SOLR_RECORD_WRITER_MAX_SEGMENTS, options.maxSegments); // switch off a false warning about allegedly not implementing Tool // also see http://hadoop.6.n7.nabble.com/GenericOptionsParser-warning-td8103.html // also see https://issues.apache.org/jira/browse/HADOOP-8183 getConf().setBoolean("mapred.used.genericoptionsparser", true); if (options.log4jConfigFile != null) { Utils.setLogConfigFile(options.log4jConfigFile, getConf()); addDistributedCacheFile(options.log4jConfigFile, getConf()); } job = Job.getInstance(getConf()); job.setJarByClass(getClass()); if (options.morphlineFile == null) { throw new ArgumentParserException("Argument --morphline-file is required", null); } verifyGoLiveArgs(options, null); verifyZKStructure(options, null); int mappers = new JobClient(job.getConfiguration()).getClusterStatus().getMaxMapTasks(); // MR1 //mappers = job.getCluster().getClusterStatus().getMapSlotCapacity(); // Yarn only LOG.info("Cluster reports {} mapper slots", mappers); if (options.mappers == -1) { mappers = 8 * mappers; // better accomodate stragglers } else { mappers = options.mappers; } if (mappers <= 0) { throw new IllegalStateException("Illegal number of mappers: " + mappers); } options.mappers = mappers; FileSystem fs = options.outputDir.getFileSystem(job.getConfiguration()); if (fs.exists(options.outputDir) && !delete(options.outputDir, true, fs)) { return -1; } Path outputStep2Dir = new Path(options.outputDir, "tmp2"); Path outputStep1Dir = new Path(options.outputDir, "tmp1"); Path fullInputList = new Path(outputStep1Dir, FULL_INPUT_LIST); 
LOG.debug("Creating list of input files for mappers: {}", fullInputList); long numFiles = addInputFiles(options.inputFiles, options.inputLists, fullInputList, job.getConfiguration()); if (numFiles == 0) { LOG.info("No input files found - nothing to process"); return 0; } int numLinesPerSplit = (int) ceilDivide(numFiles, mappers); if (numLinesPerSplit < 0) { // numeric overflow from downcasting long to int? numLinesPerSplit = Integer.MAX_VALUE; } numLinesPerSplit = Math.max(1, numLinesPerSplit); int realMappers = Math.min(mappers, (int) ceilDivide(numFiles, numLinesPerSplit)); calculateNumReducers(options, realMappers); int reducers = options.reducers; LOG.info("Using these parameters: " + "numFiles: {}, mappers: {}, realMappers: {}, reducers: {}, shards: {}, fanout: {}, maxSegments: {}", new Object[]{numFiles, mappers, realMappers, reducers, options.shards, options.fanout, options.maxSegments}); LOG.info("Randomizing list of {} input files to spread indexing load more evenly among mappers", numFiles); long startTime = System.currentTimeMillis(); if (numFiles < job.getConfiguration().getInt(MAIN_MEMORY_RANDOMIZATION_THRESHOLD, 100001)) { // If there are few input files reduce latency by directly running main memory randomization // instead of launching a high latency MapReduce job randomizeFewInputFiles(fs, outputStep2Dir, fullInputList); } else { // Randomize using a MapReduce job. Use sequential algorithm below a certain threshold because there's no // benefit in using many parallel mapper tasks just to randomize the order of a few lines each int numLinesPerRandomizerSplit = Math.max(10 * 1000 * 1000, numLinesPerSplit); Job randomizerJob = randomizeManyInputFiles(getConf(), fullInputList, outputStep2Dir, numLinesPerRandomizerSplit); if (!waitForCompletion(randomizerJob, options.isVerbose)) { return -1; // job failed } } float secs = (System.currentTimeMillis() - startTime) / 1000.0f; LOG.info("Done. 
Randomizing list of {} input files took {} secs", numFiles, secs); job.setInputFormatClass(NLineInputFormat.class); NLineInputFormat.addInputPath(job, outputStep2Dir); NLineInputFormat.setNumLinesPerSplit(job, numLinesPerSplit); String mapperClass = job.getConfiguration().get(JobContext.MAP_CLASS_ATTR); if (mapperClass == null) { // enable customization Class clazz = MorphlineMapper.class; mapperClass = clazz.getName(); job.setMapperClass(clazz); } job.setJobName(getClass().getName() + "/" + Utils.getShortClassName(mapperClass)); return runIndexingPipeline(job, new NopJobProcessCallback(), getConf(), options, programStartTime, fs, fullInputList, numFiles, realMappers, reducers); } public static int runIndexingPipeline(Job job, JobProcessCallback callback, Configuration conf, Options options, long programStartTime, FileSystem fs, Path fullInputList, long numFiles, int realMappers, int reducers) throws IOException, KeeperException, InterruptedException, ClassNotFoundException, FileNotFoundException { long startTime; float secs; Path outputResultsDir = new Path(options.outputDir, RESULTS_DIR); Path outputReduceDir = new Path(options.outputDir, "reducers"); Path outputTreeMergeStep = new Path(options.outputDir, "mtree-merge-output"); FileOutputFormat.setOutputPath(job, outputReduceDir); if (job.getConfiguration().get(JobContext.REDUCE_CLASS_ATTR) == null) { // enable customization job.setReducerClass(SolrReducer.class); } if (options.updateConflictResolver == null) { throw new IllegalArgumentException("updateConflictResolver must not be null"); } job.getConfiguration().set(SolrReducer.UPDATE_CONFLICT_RESOLVER, options.updateConflictResolver); job.getConfiguration().setInt(SolrOutputFormat.SOLR_RECORD_WRITER_MAX_SEGMENTS, options.maxSegments); if (options.zkHost != null) { assert options.collection != null; /* * MapReduce partitioner that partitions the Mapper output such that each * SolrInputDocument gets sent to the SolrCloud shard that it would have * been sent to if 
the document were ingested via the standard SolrCloud * Near Real Time (NRT) API. * * In other words, this class implements the same partitioning semantics * as the standard SolrCloud NRT API. This enables to mix batch updates * from MapReduce ingestion with updates from standard NRT ingestion on * the same SolrCloud cluster, using identical unique document keys. */ if (job.getConfiguration().get(JobContext.PARTITIONER_CLASS_ATTR) == null) { // enable customization job.setPartitionerClass(ForkedSolrCloudPartitioner.class); } job.getConfiguration().set(ForkedSolrCloudPartitioner.ZKHOST, options.zkHost); job.getConfiguration().set(ForkedSolrCloudPartitioner.COLLECTION, options.collection); } job.getConfiguration().setInt(ForkedSolrCloudPartitioner.SHARDS, options.shards); job.setOutputFormatClass(SolrOutputFormat.class); if (options.solrHomeDir != null) { SolrOutputFormat.setupSolrHomeCache(options.solrHomeDir, job); } else { assert options.zkHost != null; // use the config that this collection uses for the SolrHomeCache. ForkedZooKeeperInspector zki = new ForkedZooKeeperInspector(); SolrZkClient zkClient = zki.getZkClient(options.zkHost); try { String configName = zki.readConfigName(zkClient, options.collection); File tmpSolrHomeDir = zki.downloadConfigDir(zkClient, configName); SolrOutputFormat.setupSolrHomeCache(tmpSolrHomeDir, job); System.out.println("Using " + tmpSolrHomeDir + " as solr home"); options.solrHomeDir = tmpSolrHomeDir; } finally { zkClient.close(); } } // MorphlineMapRunner runner = setupMorphline(job, options); // if (options.isDryRun && runner != null) { // LOG.info("Indexing {} files in dryrun mode", numFiles); // startTime = System.currentTimeMillis(); // dryRun(job, runner, fs, fullInputList); // secs = (System.currentTimeMillis() - startTime) / 1000.0f; // LOG.info("Done. 
Indexing {} files in dryrun mode took {} secs", numFiles, secs); // goodbye(null, programStartTime); // return 0; // } // job.getConfiguration().set(MorphlineMapRunner.MORPHLINE_FILE_PARAM, options.morphlineFile.getName()); job.setNumReduceTasks(reducers); job.setOutputKeyClass(Text.class); job.setOutputValueClass(SolrInputDocumentWritable.class); LOG.info("Indexing data into {} reducers", new Object[]{reducers}); startTime = System.currentTimeMillis(); job.submit(); callback.jobStarted(job.getJobID().toString(), job.getTrackingURL()); if (!waitForCompletion(job, options.isVerbose)) { return -1; // job failed } secs = (System.currentTimeMillis() - startTime) / 1000.0f; LOG.info("Done. Indexing data into {} reducers took {} secs", new Object[]{reducers, secs}); int mtreeMergeIterations = 0; if (reducers > options.shards) { mtreeMergeIterations = (int) Math.round(log(options.fanout, reducers / options.shards)); } LOG.debug("MTree merge iterations to do: {}", mtreeMergeIterations); int mtreeMergeIteration = 1; while (reducers > options.shards) { // run a mtree merge iteration job = Job.getInstance(conf); job.setJarByClass(ForkedMapReduceIndexerTool.class); job.setJobName( ForkedMapReduceIndexerTool.class.getName() + "/" + Utils.getShortClassName(ForkedTreeMergeMapper.class)); job.setMapperClass(ForkedTreeMergeMapper.class); job.setOutputFormatClass(ForkedTreeMergeOutputFormat.class); job.setNumReduceTasks(0); job.setOutputKeyClass(Text.class); job.setOutputValueClass(NullWritable.class); job.setInputFormatClass(NLineInputFormat.class); Path inputStepDir = new Path(options.outputDir, "mtree-merge-input-iteration" + mtreeMergeIteration); fullInputList = new Path(inputStepDir, FULL_INPUT_LIST); LOG.debug("MTree merge iteration {}/{}: Creating input list file for mappers {}", new Object[]{mtreeMergeIteration, mtreeMergeIterations, fullInputList}); numFiles = createTreeMergeInputDirList(job, outputReduceDir, fs, fullInputList); if (numFiles != reducers) { throw new 
IllegalStateException("Not same reducers: " + reducers + ", numFiles: " + numFiles); } NLineInputFormat.addInputPath(job, fullInputList); NLineInputFormat.setNumLinesPerSplit(job, options.fanout); FileOutputFormat.setOutputPath(job, outputTreeMergeStep); LOG.info("MTree merge iteration {}/{}: Merging {} shards into {} shards using fanout {}", new Object[]{ mtreeMergeIteration, mtreeMergeIterations, reducers, (reducers / options.fanout), options.fanout}); startTime = System.currentTimeMillis(); job.submit(); callback.jobStarted(job.getJobID().toString(), job.getTrackingURL()); if (!waitForCompletion(job, options.isVerbose)) { return -1; // job failed } if (!renameTreeMergeShardDirs(outputTreeMergeStep, job, fs)) { return -1; } secs = (System.currentTimeMillis() - startTime) / 1000.0f; LOG.info("MTree merge iteration {}/{}: Done. Merging {} shards into {} shards using fanout {} took {} secs", new Object[]{mtreeMergeIteration, mtreeMergeIterations, reducers, (reducers / options.fanout), options.fanout, secs}); if (!delete(outputReduceDir, true, fs)) { return -1; } if (!rename(outputTreeMergeStep, outputReduceDir, fs)) { return -1; } assert reducers % options.fanout == 0; reducers = reducers / options.fanout; mtreeMergeIteration++; } assert reducers == options.shards; // normalize output shard dir prefix, i.e. 
// rename part-r-00000 to part-00000 (stems from zero tree merge iterations) // rename part-m-00000 to part-00000 (stems from > 0 tree merge iterations) for (FileStatus stats : fs.listStatus(outputReduceDir)) { String dirPrefix = SolrOutputFormat.getOutputName(job); Path srcPath = stats.getPath(); if (stats.isDirectory() && srcPath.getName().startsWith(dirPrefix)) { String dstName = dirPrefix + srcPath.getName().substring(dirPrefix.length() + "-m".length()); Path dstPath = new Path(srcPath.getParent(), dstName); if (!rename(srcPath, dstPath, fs)) { return -1; } } } ; // publish results dir if (!rename(outputReduceDir, outputResultsDir, fs)) { return -1; } if (options.goLive && !new GoLive().goLive(options, listSortedOutputShardDirs(job, outputResultsDir, fs))) { return -1; } goodbye(job, programStartTime); return 0; } private void calculateNumReducers(Options options, int realMappers) throws IOException { if (options.shards <= 0) { throw new IllegalStateException("Illegal number of shards: " + options.shards); } if (options.fanout <= 1) { throw new IllegalStateException("Illegal fanout: " + options.fanout); } if (realMappers <= 0) { throw new IllegalStateException("Illegal realMappers: " + realMappers); } int reducers = new JobClient(job.getConfiguration()).getClusterStatus().getMaxReduceTasks(); // MR1 //reducers = job.getCluster().getClusterStatus().getReduceSlotCapacity(); // Yarn only LOG.info("Cluster reports {} reduce slots", reducers); if (options.reducers == 0) { reducers = options.shards; } else if (options.reducers == -1) { reducers = Math.min(reducers, realMappers); // no need to use many reducers when using few mappers } else { reducers = options.reducers; } reducers = Math.max(reducers, options.shards); if (reducers != options.shards) { // Ensure fanout isn't misconfigured. 
fanout can't meaningfully be larger than what would be // required to merge all leaf shards in one single tree merge iteration into root shards options.fanout = Math.min(options.fanout, (int) ceilDivide(reducers, options.shards)); // Ensure invariant reducers == options.shards * (fanout ^ N) where N is an integer >= 1. // N is the number of mtree merge iterations. // This helps to evenly spread docs among root shards and simplifies the impl of the mtree merge algorithm. int s = options.shards; while (s < reducers) { s = s * options.fanout; } reducers = s; assert reducers % options.fanout == 0; } options.reducers = reducers; } private long addInputFiles(List<Path> inputFiles, List<Path> inputLists, Path fullInputList, Configuration conf) throws IOException { long numFiles = 0; FileSystem fs = fullInputList.getFileSystem(conf); FSDataOutputStream out = fs.create(fullInputList); try { Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8")); for (Path inputFile : inputFiles) { FileSystem inputFileFs = inputFile.getFileSystem(conf); if (inputFileFs.exists(inputFile)) { PathFilter pathFilter = new PathFilter() { @Override public boolean accept(Path path) { return !path.getName().startsWith("."); // ignore "hidden" files and dirs } }; numFiles += addInputFilesRecursively(inputFile, writer, inputFileFs, pathFilter); } } for (Path inputList : inputLists) { InputStream in; if (inputList.toString().equals("-")) { in = System.in; } else if (inputList.isAbsoluteAndSchemeAuthorityNull()) { in = new BufferedInputStream(new FileInputStream(inputList.toString())); } else { in = inputList.getFileSystem(conf).open(inputList); } try { BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8")); String line; while ((line = reader.readLine()) != null) { writer.write(line + "\n"); numFiles++; } reader.close(); } finally { in.close(); } } writer.close(); } finally { out.close(); } return numFiles; } /** * Add the specified file to the input set, if path 
is a directory then * add the files contained therein. */ private long addInputFilesRecursively(Path path, Writer writer, FileSystem fs, PathFilter pathFilter) throws IOException { long numFiles = 0; for (FileStatus stat : fs.listStatus(path, pathFilter)) { LOG.debug("Adding path {}", stat.getPath()); if (stat.isDirectory()) { numFiles += addInputFilesRecursively(stat.getPath(), writer, fs, pathFilter); } else { writer.write(stat.getPath().toString() + "\n"); numFiles++; } } return numFiles; } private void randomizeFewInputFiles(FileSystem fs, Path outputStep2Dir, Path fullInputList) throws IOException { List<String> lines = new ArrayList(); BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(fullInputList), "UTF-8")); try { String line; while ((line = reader.readLine()) != null) { lines.add(line); } } finally { reader.close(); } Collections.shuffle(lines, new Random(421439783L)); // constant seed for reproducability FSDataOutputStream out = fs.create(new Path(outputStep2Dir, FULL_INPUT_LIST)); Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8")); try { for (String line : lines) { writer.write(line + "\n"); } } finally { writer.close(); } } /** * To uniformly spread load across all mappers we randomize fullInputList * with a separate small Mapper & Reducer preprocessing step. This way * each input line ends up on a random position in the output file list. * Each mapper indexes a disjoint consecutive set of files such that each * set has roughly the same size, at least from a probabilistic * perspective. * * For example an input file with the following input list of URLs: * * A * B * C * D * * might be randomized into the following output list of URLs: * * C * A * D * B * * The implementation sorts the list of lines by randomly generated numbers. 
*/ private Job randomizeManyInputFiles(Configuration baseConfig, Path fullInputList, Path outputStep2Dir, int numLinesPerSplit) throws IOException { Job job2 = Job.getInstance(baseConfig); job2.setJarByClass(getClass()); job2.setJobName(getClass().getName() + "/" + Utils.getShortClassName(LineRandomizerMapper.class)); job2.setInputFormatClass(NLineInputFormat.class); NLineInputFormat.addInputPath(job2, fullInputList); NLineInputFormat.setNumLinesPerSplit(job2, numLinesPerSplit); job2.setMapperClass(LineRandomizerMapper.class); job2.setReducerClass(LineRandomizerReducer.class); job2.setOutputFormatClass(TextOutputFormat.class); FileOutputFormat.setOutputPath(job2, outputStep2Dir); job2.setNumReduceTasks(1); job2.setOutputKeyClass(LongWritable.class); job2.setOutputValueClass(Text.class); return job2; } // do the same as if the user had typed 'hadoop ... --files <file>' public static void addDistributedCacheFile(File file, Configuration conf) throws IOException { String HADOOP_TMP_FILES = "tmpfiles"; // see Hadoop's GenericOptionsParser String tmpFiles = conf.get(HADOOP_TMP_FILES, ""); if (tmpFiles.length() > 0) { // already present? 
tmpFiles = tmpFiles + ","; } GenericOptionsParser parser = new GenericOptionsParser( new Configuration(conf), new String[]{"--files", file.getCanonicalPath()}); String additionalTmpFiles = parser.getConfiguration().get(HADOOP_TMP_FILES); assert additionalTmpFiles != null; assert additionalTmpFiles.length() > 0; tmpFiles += additionalTmpFiles; conf.set(HADOOP_TMP_FILES, tmpFiles); } private static int createTreeMergeInputDirList(Job job, Path outputReduceDir, FileSystem fs, Path fullInputList) throws FileNotFoundException, IOException { FileStatus[] dirs = listSortedOutputShardDirs(job, outputReduceDir, fs); int numFiles = 0; FSDataOutputStream out = fs.create(fullInputList); try { Writer writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8")); for (FileStatus stat : dirs) { LOG.debug("Adding path {}", stat.getPath()); Path dir = new Path(stat.getPath(), "data/index"); if (!fs.isDirectory(dir)) { throw new IllegalStateException("Not a directory: " + dir); } writer.write(dir.toString() + "\n"); numFiles++; } writer.close(); } finally { out.close(); } return numFiles; } private static FileStatus[] listSortedOutputShardDirs(Job job, Path outputReduceDir, FileSystem fs) throws FileNotFoundException, IOException { final String dirPrefix = SolrOutputFormat.getOutputName(job); FileStatus[] dirs = fs.listStatus(outputReduceDir, new PathFilter() { @Override public boolean accept(Path path) { return path.getName().startsWith(dirPrefix); } }); for (FileStatus dir : dirs) { if (!dir.isDirectory()) { throw new IllegalStateException("Not a directory: " + dir.getPath()); } } // use alphanumeric sort (rather than lexicographical sort) to properly handle more than 99999 shards Arrays.sort(dirs, new Comparator<FileStatus>() { @Override public int compare(FileStatus f1, FileStatus f2) { return new ForkedAlphaNumericComparator().compare(f1.getPath().getName(), f2.getPath().getName()); } }); return dirs; } /* * You can run MapReduceIndexerTool in Solrcloud mode, and once the 
MR job completes, you can use * the standard solrj Solrcloud API to send doc updates and deletes to SolrCloud, and those updates * and deletes will go to the right Solr shards, and it will work just fine. * * The MapReduce framework doesn't guarantee that input split N goes to the map task with the * taskId = N. The job tracker and Yarn schedule and assign tasks, considering data locality * aspects, but without regard of the input split# withing the overall list of input splits. In * other words, split# != taskId can be true. * * To deal with this issue, our mapper tasks write a little auxiliary meta data file (per task) * that tells the job driver which taskId processed which split#. Once the mapper-only job is * completed, the job driver renames the output dirs such that the dir name contains the true solr * shard id, based on these auxiliary files. * * This way each doc gets assigned to the right Solr shard even with #reducers > #solrshards * * Example for a merge with two shards: * * part-m-00000 and part-m-00001 goes to outputShardNum = 0 and will end up in merged part-m-00000 * part-m-00002 and part-m-00003 goes to outputShardNum = 1 and will end up in merged part-m-00001 * part-m-00004 and part-m-00005 goes to outputShardNum = 2 and will end up in merged part-m-00002 * ... 
and so on * * Also see run() method above where it uses NLineInputFormat.setNumLinesPerSplit(job, * options.fanout) * * Also see TreeMergeOutputFormat.TreeMergeRecordWriter.writeShardNumberFile() */ private static boolean renameTreeMergeShardDirs(Path outputTreeMergeStep, Job job, FileSystem fs) throws IOException { final String dirPrefix = SolrOutputFormat.getOutputName(job); FileStatus[] dirs = fs.listStatus(outputTreeMergeStep, new PathFilter() { @Override public boolean accept(Path path) { return path.getName().startsWith(dirPrefix); } }); for (FileStatus dir : dirs) { if (!dir.isDirectory()) { throw new IllegalStateException("Not a directory: " + dir.getPath()); } } for (FileStatus dir : dirs) { Path path = dir.getPath(); Path renamedPath = new Path(path.getParent(), "_" + path.getName()); if (!rename(path, renamedPath, fs)) { return false; } } for (FileStatus dir : dirs) { Path path = dir.getPath(); Path renamedPath = new Path(path.getParent(), "_" + path.getName()); Path solrShardNumberFile = new Path(renamedPath, ForkedTreeMergeMapper.SOLR_SHARD_NUMBER); InputStream in = fs.open(solrShardNumberFile); byte[] bytes = ByteStreams.toByteArray(in); in.close(); Preconditions.checkArgument(bytes.length > 0); int solrShard = Integer.parseInt(new String(bytes, Charsets.UTF_8)); if (!delete(solrShardNumberFile, false, fs)) { return false; } // see FileOutputFormat.NUMBER_FORMAT NumberFormat numberFormat = NumberFormat.getInstance(); numberFormat.setMinimumIntegerDigits(5); numberFormat.setGroupingUsed(false); Path finalPath = new Path(renamedPath.getParent(), dirPrefix + "-m-" + numberFormat.format(solrShard)); LOG.info("MTree merge renaming solr shard: " + solrShard + " from dir: " + dir.getPath() + " to dir: " + finalPath); if (!rename(renamedPath, finalPath, fs)) { return false; } } return true; } public static void verifyGoLiveArgs(Options opts, ArgumentParser parser) throws ArgumentParserException { if (opts.zkHost == null && opts.solrHomeDir == null) { throw 
new ArgumentParserException("At least one of --zk-host or --solr-home-dir is required", parser); } if (opts.goLive && opts.zkHost == null && opts.shardUrls == null) { throw new ArgumentParserException("--go-live requires that you also pass --shard-url or --zk-host", parser); } if (opts.zkHost != null && opts.collection == null) { throw new ArgumentParserException("--zk-host requires that you also pass --collection", parser); } if (opts.zkHost != null) { return; // verify structure of ZK directory later, to avoid checking run-time errors during parsing. } else if (opts.shardUrls != null) { if (opts.shardUrls.size() == 0) { throw new ArgumentParserException("--shard-url requires at least one URL", parser); } } else if (opts.shards != null) { if (opts.shards <= 0) { throw new ArgumentParserException("--shards must be a positive number: " + opts.shards, parser); } } else { throw new ArgumentParserException("You must specify one of the following (mutually exclusive) arguments: " + "--zk-host or --shard-url or --shards", parser); } if (opts.shardUrls != null) { opts.shards = opts.shardUrls.size(); } assert opts.shards != null; assert opts.shards > 0; } private static void verifyZKStructure(Options opts, ArgumentParser parser) throws ArgumentParserException { if (opts.zkHost != null) { assert opts.collection != null; ForkedZooKeeperInspector zki = new ForkedZooKeeperInspector(); try { opts.shardUrls = zki.extractShardUrls(opts.zkHost, opts.collection); } catch (Exception e) { LOG.debug("Cannot extract SolrCloud shard URLs from ZooKeeper", e); throw new ArgumentParserException(e, parser); } assert opts.shardUrls != null; if (opts.shardUrls.size() == 0) { throw new ArgumentParserException("--zk-host requires ZooKeeper " + opts.zkHost + " to contain at least one SolrCore for collection: " + opts.collection, parser); } opts.shards = opts.shardUrls.size(); LOG.debug("Using SolrCloud shard URLs: {}", opts.shardUrls); } } public static boolean waitForCompletion(Job job, boolean 
isVerbose) throws IOException, InterruptedException, ClassNotFoundException { LOG.debug("Running job: " + getJobInfo(job)); boolean success = job.waitForCompletion(isVerbose); if (!success) { LOG.error("Job failed! " + getJobInfo(job)); } return success; } public static void goodbye(Job job, long startTime) { float secs = (System.currentTimeMillis() - startTime) / 1000.0f; if (job != null) { LOG.info("Succeeded with job: " + getJobInfo(job)); } LOG.info("Success. Done. Program took {} secs. Goodbye.", secs); } private static String getJobInfo(Job job) { return "jobName: " + job.getJobName() + ", jobId: " + job.getJobID(); } private static boolean rename(Path src, Path dst, FileSystem fs) throws IOException { boolean success = fs.rename(src, dst); if (!success) { LOG.error("Cannot rename " + src + " to " + dst); } return success; } private static boolean delete(Path path, boolean recursive, FileSystem fs) throws IOException { boolean success = fs.delete(path, recursive); if (!success) { LOG.error("Cannot delete " + path); } return success; } // same as IntMath.divide(p, q, RoundingMode.CEILING) private long ceilDivide(long p, long q) { long result = p / q; if (p % q != 0) { result++; } return result; } /** * Returns <tt>log<sub>base</sub>value</tt>. */ private static double log(double base, double value) { return Math.log(value) / Math.log(base); } }
replace use of System.out with LOG.debug
hbase-indexer-mr/src/main/java/org/apache/solr/hadoop/ForkedMapReduceIndexerTool.java
replace use of System.out with LOG.debug
<ide><path>base-indexer-mr/src/main/java/org/apache/solr/hadoop/ForkedMapReduceIndexerTool.java <ide> String configName = zki.readConfigName(zkClient, options.collection); <ide> File tmpSolrHomeDir = zki.downloadConfigDir(zkClient, configName); <ide> SolrOutputFormat.setupSolrHomeCache(tmpSolrHomeDir, job); <del> System.out.println("Using " + tmpSolrHomeDir + " as solr home"); <add> LOG.debug("Using " + tmpSolrHomeDir + " as solr home"); <ide> options.solrHomeDir = tmpSolrHomeDir; <ide> } finally { <ide> zkClient.close();
Java
apache-2.0
1b64d4f5b24f6d2729560fac614550794583c2cd
0
joelind/zxing-iphone,zilaiyedaren/zxing-iphone,zilaiyedaren/zxing-iphone,joelind/zxing-iphone,joelind/zxing-iphone,joelind/zxing-iphone,zilaiyedaren/zxing-iphone,zilaiyedaren/zxing-iphone,zilaiyedaren/zxing-iphone,joelind/zxing-iphone,zilaiyedaren/zxing-iphone,zilaiyedaren/zxing-iphone,zilaiyedaren/zxing-iphone,joelind/zxing-iphone,joelind/zxing-iphone,joelind/zxing-iphone
/* * Copyright 2009 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.client.androidtest; import com.google.zxing.LuminanceSource; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import java.io.FileNotFoundException; /** * This class is used to help decode images from files which arrive as RGB data from * Android bitmaps. It does not support cropping or rotation. * * @author [email protected] (Daniel Switkin) */ public final class RGBLuminanceSource extends LuminanceSource { private final byte[] luminances; public RGBLuminanceSource(String path) throws FileNotFoundException { this(loadBitmap(path)); } public RGBLuminanceSource(Bitmap bitmap) { super(bitmap.getWidth(), bitmap.getHeight()); int width = bitmap.getWidth(); int height = bitmap.getHeight(); int[] pixels = new int[width * height]; bitmap.getPixels(pixels, 0, width, 0, 0, width, height); // In order to measure pure decoding speed, we convert the entire image to a greyscale array // up front, which is the same as the Y channel of the YUVLuminanceSource in the real app. luminances = new byte[width * height]; for (int y = 0; y < height; y++) { int offset = y * width; for (int x = 0; x < width; x++) { int pixel = pixels[offset + x]; int r = (pixel >> 16) & 0xff; int g = (pixel >> 8) & 0xff; int b = pixel & 0xff; if (r == g && g == b) { // Image is already greyscale, so pick any channel. 
luminances[offset + x] = (byte) r; } else { // Calculate luminance cheaply, favoring green. luminances[offset + x] = (byte) ((r + g + g + b) >> 2); } } } } public byte[] getRow(int y, byte[] row) { if (y < 0 || y >= getHeight()) { throw new IllegalArgumentException("Requested row is outside the image: " + y); } int width = getWidth(); if (row == null || row.length < width) { row = new byte[width]; } System.arraycopy(luminances, y * width, row, 0, width); return row; } // Since this class does not support cropping, the underlying byte array already contains // exactly what the caller is asking for, so give it to them without a copy. public byte[] getMatrix() { return luminances; } private static Bitmap loadBitmap(String path) throws FileNotFoundException { Bitmap bitmap = BitmapFactory.decodeFile(path); if (bitmap == null) { throw new FileNotFoundException("Couldn't open " + path); } return bitmap; } }
androidtest/src/com/google/zxing/client/androidtest/RGBLuminanceSource.java
/* * Copyright 2009 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.client.androidtest; import com.google.zxing.LuminanceSource; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import java.io.FileNotFoundException; /** * This class is used to help decode images from files which arrive as RGB data from * Android bitmaps. It does not support cropping or rotation. * * @author [email protected] (Daniel Switkin) */ public final class RGBLuminanceSource extends LuminanceSource { private final byte[] luminances; public RGBLuminanceSource(String path) throws FileNotFoundException { this(loadBitmap(path)); } public RGBLuminanceSource(Bitmap bitmap) { super(bitmap.getWidth(), bitmap.getHeight()); int width = bitmap.getWidth(); int height = bitmap.getHeight(); int[] pixels = new int[width * height]; bitmap.getPixels(pixels, 0, width, 0, 0, width, height); // In order to measure pure decoding speed, we convert the entire image to a greyscale array // up front, which is the same as the Y channel of the YUVLuminanceSource in the real app. luminances = new byte[width * height]; for (int y = 0; y < height; y++) { int offset = y * height; for (int x = 0; x < width; x++) { int pixel = pixels[offset + x]; int r = (pixel >> 16) & 0xff; int g = (pixel >> 8) & 0xff; int b = pixel & 0xff; if (r == g && g == b) { // Image is already greyscale, so pick any channel. 
luminances[offset + x] = (byte) r; } else { // Calculate luminance cheaply, favoring green. luminances[offset + x] = (byte) ((r + g + g + b) >> 2); } } } } public byte[] getRow(int y, byte[] row) { if (y < 0 || y >= getHeight()) { throw new IllegalArgumentException("Requested row is outside the image: " + y); } int width = getWidth(); if (row == null || row.length < width) { row = new byte[width]; } System.arraycopy(luminances, y * width, row, 0, width); return row; } // Since this class does not support cropping, the underlying byte array already contains // exactly what the caller is asking for, so give it to them without a copy. public byte[] getMatrix() { return luminances; } private static Bitmap loadBitmap(String path) throws FileNotFoundException { Bitmap bitmap = BitmapFactory.decodeFile(path); if (bitmap == null) { throw new FileNotFoundException("Couldn't open " + path); } return bitmap; } }
Whoops, fixed long-standing bug in the benchmark. Never noticed before because we always used square images.
androidtest/src/com/google/zxing/client/androidtest/RGBLuminanceSource.java
Whoops, fixed long-standing bug in the benchmark. Never noticed before because we always used square images.
<ide><path>ndroidtest/src/com/google/zxing/client/androidtest/RGBLuminanceSource.java <ide> // up front, which is the same as the Y channel of the YUVLuminanceSource in the real app. <ide> luminances = new byte[width * height]; <ide> for (int y = 0; y < height; y++) { <del> int offset = y * height; <add> int offset = y * width; <ide> for (int x = 0; x < width; x++) { <ide> int pixel = pixels[offset + x]; <ide> int r = (pixel >> 16) & 0xff;
JavaScript
apache-2.0
b75b1616a1ca6efc9726bede3f10c0ff6c81b365
0
stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib
'use strict'; // MODULES // var bench = require( '@stdlib/bench' ); var randu = require( '@stdlib/math/base/random/randu' ); var isnan = require( '@stdlib/math/base/utils/is-nan' ); var pow = require( '@stdlib/math/base/special/pow' ); var pkg = require( './../package.json' ).name; var dasum = require( './../lib/wasm.js' )(); // FUNCTIONS // /** * Wraps `dasum`, explicitly allocating upon receiving a typed array. This would need to happen if one wanted to provide unique externally defined arrays to a WASM interface. * * @private * @param {PositiveInteger} N - number of values to sum * @param {Float64Array} x - input array * @param {PositiveInteger} stride - `x` stride length * @returns {number} sum */ function wrapper( N, x, stride ) { var nbytes; var bytes; var s; // Determine the number of bytes: nbytes = x.length * x.BYTES_PER_ELEMENT; // Allocate space on the heap: bytes = dasum.malloc( nbytes ); // Copy the data to the heap: bytes.set( new Uint8Array( x.buffer ) ); // Compute the sum: s = dasum( N, bytes, stride ); // Free the memory: dasum.free( bytes ); return s; } // end FUNCTION wrapper() /** * Creates a benchmark function. * * @private * @param {PositiveInteger} len - array length * @returns {Function} benchmark function */ function createBenchmark( len ) { var x; var i; x = new Float64Array( len ); for ( i = 0; i < x.length; i++ ) { x[ i ] = ( randu()*10000.0 ) - 20000.0; } return benchmark; /** * Benchmark function. * * @private * @param {Benchmark} b - benchmark instance */ function benchmark( b ) { var y; var i; b.tic(); for ( i = 0; i < b.iterations; i++ ) { y = wrapper( x.length, x, 1 ); if ( isnan( y ) ) { b.fail( 'should not return NaN' ); } } b.toc(); if ( isnan( y ) ) { b.fail( 'should not return NaN' ); } b.pass( 'benchmark finished' ); b.end(); } // end FUNCTION benchmark() } // end FUNCTION createBenchmark() // MAIN // /** * Main execution sequence. 
* * @private */ function main() { var len; var min; var max; var f; var i; min = 1; // 10^min max = 6; // 10^max for ( i = min; i <= max; i++ ) { len = pow( 10, i ); f = createBenchmark( len ); bench( pkg+'::wasm,malloc:len='+len, f ); } } // end FUNCTION main() main();
lib/node_modules/@stdlib/math/base/blas/dasum/benchmark/benchmark.malloc.js
'use strict'; // MODULES // var bench = require( '@stdlib/bench' ); var randu = require( '@stdlib/math/base/random/randu' ); var isnan = require( '@stdlib/math/base/utils/is-nan' ); var pow = require( '@stdlib/math/base/special/pow' ); var pkg = require( './../package.json' ).name; var dasum = require( './../lib/wasm.js' )(); // FUNCTIONS // /** * Wraps `dasum`. * * @private * @param {PositiveInteger} N - number of values to sum * @param {Float64Array} x - input array * @param {PositiveInteger} stride - `x` stride length * @returns {number} sum */ function wrapper( N, x, stride ) { var nbytes; var bytes; var s; // Determine the number of bytes: nbytes = x.length * x.BYTES_PER_ELEMENT; // Allocate space on the heap: bytes = dasum.malloc( nbytes ); // Copy the data to the heap: bytes.set( new Uint8Array( x.buffer ) ); // Compute the sum: s = dasum( N, bytes, stride ); // Free the memory: dasum.free( bytes ); return s; } // end FUNCTION wrapper() /** * Creates a benchmark function. * * @private * @param {PositiveInteger} len - array length * @returns {Function} benchmark function */ function createBenchmark( len ) { var x; var i; x = new Float64Array( len ); for ( i = 0; i < x.length; i++ ) { x[ i ] = ( randu()*10000.0 ) - 20000.0; } return benchmark; /** * Benchmark function. * * @private * @param {Benchmark} b - benchmark instance */ function benchmark( b ) { var y; var i; b.tic(); for ( i = 0; i < b.iterations; i++ ) { y = wrapper( x.length, x, 1 ); if ( isnan( y ) ) { b.fail( 'should not return NaN' ); } } b.toc(); if ( isnan( y ) ) { b.fail( 'should not return NaN' ); } b.pass( 'benchmark finished' ); b.end(); } // end FUNCTION benchmark() } // end FUNCTION createBenchmark() // MAIN // /** * Main execution sequence. 
* * @private */ function main() { var len; var min; var max; var f; var i; min = 1; // 10^min max = 6; // 10^max for ( i = min; i <= max; i++ ) { len = pow( 10, i ); f = createBenchmark( len ); bench( pkg+'::wasm,malloc:len='+len, f ); } } // end FUNCTION main() main();
Update description
lib/node_modules/@stdlib/math/base/blas/dasum/benchmark/benchmark.malloc.js
Update description
<ide><path>ib/node_modules/@stdlib/math/base/blas/dasum/benchmark/benchmark.malloc.js <ide> // FUNCTIONS // <ide> <ide> /** <del>* Wraps `dasum`. <add>* Wraps `dasum`, explicitly allocating upon receiving a typed array. This would need to happen if one wanted to provide unique externally defined arrays to a WASM interface. <ide> * <ide> * @private <ide> * @param {PositiveInteger} N - number of values to sum
Java
bsd-3-clause
2a65ca4cb77856e2b1f8226a07c8e9f9f17679ee
0
g-rocket/jmonkeyengine,atomixnmc/jmonkeyengine,InShadow/jmonkeyengine,Georgeto/jmonkeyengine,rbottema/jmonkeyengine,nickschot/jmonkeyengine,aaronang/jmonkeyengine,zzuegg/jmonkeyengine,GreenCubes/jmonkeyengine,davidB/jmonkeyengine,olafmaas/jmonkeyengine,danteinforno/jmonkeyengine,atomixnmc/jmonkeyengine,shurun19851206/jMonkeyEngine,skapi1992/jmonkeyengine,phr00t/jmonkeyengine,bsmr-java/jmonkeyengine,weilichuang/jmonkeyengine,Georgeto/jmonkeyengine,d235j/jmonkeyengine,tr0k/jmonkeyengine,danteinforno/jmonkeyengine,skapi1992/jmonkeyengine,wrvangeest/jmonkeyengine,wrvangeest/jmonkeyengine,Georgeto/jmonkeyengine,Georgeto/jmonkeyengine,davidB/jmonkeyengine,davidB/jmonkeyengine,danteinforno/jmonkeyengine,phr00t/jmonkeyengine,olafmaas/jmonkeyengine,OpenGrabeso/jmonkeyengine,atomixnmc/jmonkeyengine,delftsre/jmonkeyengine,delftsre/jmonkeyengine,d235j/jmonkeyengine,mbenson/jmonkeyengine,weilichuang/jmonkeyengine,davidB/jmonkeyengine,bertleft/jmonkeyengine,mbenson/jmonkeyengine,tr0k/jmonkeyengine,shurun19851206/jMonkeyEngine,tr0k/jmonkeyengine,shurun19851206/jMonkeyEngine,weilichuang/jmonkeyengine,yetanotherindie/jMonkey-Engine,jMonkeyEngine/jmonkeyengine,atomixnmc/jmonkeyengine,sandervdo/jmonkeyengine,phr00t/jmonkeyengine,bertleft/jmonkeyengine,skapi1992/jmonkeyengine,InShadow/jmonkeyengine,wrvangeest/jmonkeyengine,mbenson/jmonkeyengine,zzuegg/jmonkeyengine,OpenGrabeso/jmonkeyengine,bsmr-java/jmonkeyengine,mbenson/jmonkeyengine,GreenCubes/jmonkeyengine,InShadow/jmonkeyengine,g-rocket/jmonkeyengine,delftsre/jmonkeyengine,OpenGrabeso/jmonkeyengine,amit2103/jmonkeyengine,g-rocket/jmonkeyengine,aaronang/jmonkeyengine,bertleft/jmonkeyengine,d235j/jmonkeyengine,aaronang/jmonkeyengine,weilichuang/jmonkeyengine,atomixnmc/jmonkeyengine,OpenGrabeso/jmonkeyengine,olafmaas/jmonkeyengine,bsmr-java/jmonkeyengine,OpenGrabeso/jmonkeyengine,wrvangeest/jmonkeyengine,delftsre/jmonkeyengine,d235j/jmonkeyengine,jMonkeyEngine/jmonkeyengine,amit2103/jmonkeyengine,g-rocket/jmonkeyengine,danteinforno/j
monkeyengine,amit2103/jmonkeyengine,nickschot/jmonkeyengine,yetanotherindie/jMonkey-Engine,weilichuang/jmonkeyengine,sandervdo/jmonkeyengine,InShadow/jmonkeyengine,atomixnmc/jmonkeyengine,mbenson/jmonkeyengine,bertleft/jmonkeyengine,amit2103/jmonkeyengine,shurun19851206/jMonkeyEngine,g-rocket/jmonkeyengine,Georgeto/jmonkeyengine,yetanotherindie/jMonkey-Engine,d235j/jmonkeyengine,g-rocket/jmonkeyengine,GreenCubes/jmonkeyengine,mbenson/jmonkeyengine,yetanotherindie/jMonkey-Engine,yetanotherindie/jMonkey-Engine,tr0k/jmonkeyengine,amit2103/jmonkeyengine,yetanotherindie/jMonkey-Engine,rbottema/jmonkeyengine,weilichuang/jmonkeyengine,bsmr-java/jmonkeyengine,shurun19851206/jMonkeyEngine,olafmaas/jmonkeyengine,rbottema/jmonkeyengine,aaronang/jmonkeyengine,shurun19851206/jMonkeyEngine,sandervdo/jmonkeyengine,davidB/jmonkeyengine,danteinforno/jmonkeyengine,d235j/jmonkeyengine,phr00t/jmonkeyengine,jMonkeyEngine/jmonkeyengine,davidB/jmonkeyengine,zzuegg/jmonkeyengine,zzuegg/jmonkeyengine,sandervdo/jmonkeyengine,nickschot/jmonkeyengine,rbottema/jmonkeyengine,nickschot/jmonkeyengine,OpenGrabeso/jmonkeyengine,amit2103/jmonkeyengine,skapi1992/jmonkeyengine,jMonkeyEngine/jmonkeyengine,danteinforno/jmonkeyengine,GreenCubes/jmonkeyengine,Georgeto/jmonkeyengine
/* * Copyright (c) 2009-2010 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.jme3.app; import com.jme3.app.state.AppStateManager; import com.jme3.input.JoyInput; import com.jme3.input.KeyInput; import com.jme3.input.MouseInput; import com.jme3.input.TouchInput; import com.jme3.math.Vector3f; import com.jme3.renderer.Camera; import com.jme3.renderer.Renderer; import com.jme3.asset.AssetManager; import com.jme3.audio.AudioContext; import com.jme3.audio.AudioRenderer; import com.jme3.audio.Listener; import com.jme3.input.InputManager; import com.jme3.renderer.RenderManager; import com.jme3.renderer.ViewPort; import com.jme3.system.AppSettings; import com.jme3.system.JmeCanvasContext; import com.jme3.system.JmeContext; import java.net.MalformedURLException; import java.net.URL; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Future; import java.util.logging.Level; import java.util.logging.Logger; import com.jme3.system.JmeContext.Type; import com.jme3.system.JmeSystem; import com.jme3.system.SystemListener; import com.jme3.system.Timer; /** * The <code>Application</code> class represents an instance of a * real-time 3D rendering jME application. * * An <code>Application</code> provides all the tools that are commonly used in jME3 * applications. * * jME3 applications should extend this class and call start() to begin the * application. 
* */ public class Application implements SystemListener { private static final Logger logger = Logger.getLogger(Application.class.getName()); protected AssetManager assetManager; protected AudioRenderer audioRenderer; protected Renderer renderer; protected RenderManager renderManager; protected ViewPort viewPort; protected ViewPort guiViewPort; protected JmeContext context; protected AppSettings settings; protected Timer timer; protected Camera cam; protected Listener listener; protected boolean inputEnabled = true; protected boolean pauseOnFocus = true; protected float speed = 1f; protected boolean paused = false; protected MouseInput mouseInput; protected KeyInput keyInput; protected JoyInput joyInput; protected TouchInput touchInput; protected InputManager inputManager; protected AppStateManager stateManager; private final ConcurrentLinkedQueue<AppTask<?>> taskQueue = new ConcurrentLinkedQueue<AppTask<?>>(); /** * Create a new instance of <code>Application</code>. */ public Application(){ } /** * Returns true if pause on lost focus is enabled, false otherwise. * * @return true if pause on lost focus is enabled * * @see #setPauseOnLostFocus(boolean) */ public boolean isPauseOnLostFocus() { return pauseOnFocus; } /** * Enable or disable pause on lost focus. * <p> * By default, pause on lost focus is enabled. * If enabled, the application will stop updating * when it loses focus or becomes inactive (e.g. alt-tab). * For online or real-time applications, this might not be preferable, * so this feature should be set to disabled. For other applications, * it is best to keep it on so that CPU usage is not used when * not necessary. * * @param pauseOnLostFocus True to enable pause on lost focus, false * otherwise. 
*/ public void setPauseOnLostFocus(boolean pauseOnLostFocus) { this.pauseOnFocus = pauseOnLostFocus; } @Deprecated public void setAssetManager(AssetManager assetManager){ if (this.assetManager != null) throw new IllegalStateException("Can only set asset manager" + " before initialization."); this.assetManager = assetManager; } private void initAssetManager(){ if (settings != null){ String assetCfg = settings.getString("AssetConfigURL"); if (assetCfg != null){ URL url = null; try { url = new URL(assetCfg); } catch (MalformedURLException ex) { } if (url == null) { url = Application.class.getClassLoader().getResource(assetCfg); if (url == null) { logger.log(Level.SEVERE, "Unable to access AssetConfigURL in asset config:{0}", assetCfg); return; } } assetManager = JmeSystem.newAssetManager(url); } } if (assetManager == null){ assetManager = JmeSystem.newAssetManager( Thread.currentThread().getContextClassLoader() .getResource("com/jme3/asset/Desktop.cfg")); } } /** * Set the display settings to define the display created. Examples of * display parameters include display pixel width and height, * color bit depth, z-buffer bits, anti-aliasing samples, and update frequency. * * @param settings The settings to set. */ public void setSettings(AppSettings settings){ this.settings = settings; if (context != null && settings.useInput() != inputEnabled){ // may need to create or destroy input based // on settings change inputEnabled = !inputEnabled; if (inputEnabled){ initInput(); }else{ destroyInput(); } }else{ inputEnabled = settings.useInput(); } } /** * Sets the Timer implementation that will be used for calculating * frame times. By default, Application will use the Timer as returned * by the current JmeContext implementation. 
*/ public void setTimer(Timer timer){ this.timer = timer; if (timer != null) { timer.reset(); } if (renderManager != null) { renderManager.setTimer(timer); } } private void initDisplay(){ // aquire important objects // from the context settings = context.getSettings(); // Only reset the timer if a user has not already provided one if (timer == null) { timer = context.getTimer(); } renderer = context.getRenderer(); } private void initAudio(){ if (settings.getAudioRenderer() != null && context.getType() != Type.Headless){ audioRenderer = JmeSystem.newAudioRenderer(settings); audioRenderer.initialize(); AudioContext.setAudioRenderer(audioRenderer); listener = new Listener(); audioRenderer.setListener(listener); } } /** * Creates the camera to use for rendering. Default values are perspective * projection with 45° field of view, with near and far values 1 and 1000 * units respectively. */ private void initCamera(){ cam = new Camera(settings.getWidth(), settings.getHeight()); cam.setFrustumPerspective(45f, (float)cam.getWidth() / cam.getHeight(), 1f, 1000f); cam.setLocation(new Vector3f(0f, 0f, 10f)); cam.lookAt(new Vector3f(0f, 0f, 0f), Vector3f.UNIT_Y); renderManager = new RenderManager(renderer); //Remy - 09/14/2010 setted the timer in the renderManager renderManager.setTimer(timer); viewPort = renderManager.createMainView("Default", cam); viewPort.setClearFlags(true, true, true); // Create a new cam for the gui Camera guiCam = new Camera(settings.getWidth(), settings.getHeight()); guiViewPort = renderManager.createPostView("Gui Default", guiCam); guiViewPort.setClearFlags(false, false, false); } /** * Initializes mouse and keyboard input. Also * initializes joystick input if joysticks are enabled in the * AppSettings. 
*/ private void initInput(){ mouseInput = context.getMouseInput(); if (mouseInput != null) mouseInput.initialize(); keyInput = context.getKeyInput(); if (keyInput != null) keyInput.initialize(); touchInput = context.getTouchInput(); if (touchInput != null) touchInput.initialize(); if (!settings.getBoolean("DisableJoysticks")){ joyInput = context.getJoyInput(); if (joyInput != null) joyInput.initialize(); } inputManager = new InputManager(mouseInput, keyInput, joyInput, touchInput); } private void initStateManager(){ stateManager = new AppStateManager(this); } /** * @return The {@link AssetManager asset manager} for this application. */ public AssetManager getAssetManager(){ return assetManager; } /** * @return the {@link InputManager input manager}. */ public InputManager getInputManager(){ return inputManager; } /** * @return the {@link AppStateManager app state manager} */ public AppStateManager getStateManager() { return stateManager; } /** * @return the {@link RenderManager render manager} */ public RenderManager getRenderManager() { return renderManager; } /** * @return The {@link Renderer renderer} for the application */ public Renderer getRenderer(){ return renderer; } /** * @return The {@link AudioRenderer audio renderer} for the application */ public AudioRenderer getAudioRenderer() { return audioRenderer; } /** * @return The {@link Listener listener} object for audio */ public Listener getListener() { return listener; } /** * @return The {@link JmeContext display context} for the application */ public JmeContext getContext(){ return context; } /** * @return The {@link Camera camera} for the application */ public Camera getCamera(){ return cam; } /** * Starts the application in {@link Type#Display display} mode. * * @see #start(com.jme3.system.JmeContext.Type) */ public void start(){ start(JmeContext.Type.Display); } /** * Starts the application. * Creating a rendering context and executing * the main loop in a separate thread. 
*/ public void start(JmeContext.Type contextType){ if (context != null && context.isCreated()){ logger.warning("start() called when application already created!"); return; } if (settings == null){ settings = new AppSettings(true); } logger.log(Level.FINE, "Starting application: {0}", getClass().getName()); context = JmeSystem.newContext(settings, contextType); context.setSystemListener(this); context.create(false); } /** * Initializes the application's canvas for use. * <p> * After calling this method, cast the {@link #getContext() context} to * {@link JmeCanvasContext}, * then acquire the canvas with {@link JmeCanvasContext#getCanvas() } * and attach it to an AWT/Swing Frame. * The rendering thread will start when the canvas becomes visible on * screen, however if you wish to start the context immediately you * may call {@link #startCanvas() } to force the rendering thread * to start. * * @see JmeCanvasContext * @see Type#Canvas */ public void createCanvas(){ if (context != null && context.isCreated()){ logger.warning("createCanvas() called when application already created!"); return; } if (settings == null){ settings = new AppSettings(true); } logger.log(Level.FINE, "Starting application: {0}", getClass().getName()); context = JmeSystem.newContext(settings, JmeContext.Type.Canvas); context.setSystemListener(this); } /** * Starts the rendering thread after createCanvas() has been called. * <p> * Same as calling startCanvas(false) * * @see #startCanvas(boolean) */ public void startCanvas(){ startCanvas(false); } /** * Starts the rendering thread after createCanvas() has been called. * <p> * Calling this method is optional, the canvas will start automatically * when it becomes visible. * * @param waitFor If true, the current thread will block until the * rendering thread is running */ public void startCanvas(boolean waitFor){ context.create(waitFor); } /** * Internal use only. 
*/ public void reshape(int w, int h){ renderManager.notifyReshape(w, h); } /** * Restarts the context, applying any changed settings. * <p> * Changes to the {@link AppSettings} of this Application are not * applied immediately; calling this method forces the context * to restart, applying the new settings. */ public void restart(){ context.setSettings(settings); context.restart(); } /** * * Requests the context to close, shutting down the main loop * and making necessary cleanup operations. * * Same as calling stop(false) * * @see #stop(boolean) */ public void stop(){ stop(false); } /** * Requests the context to close, shutting down the main loop * and making necessary cleanup operations. * After the application has stopped, it cannot be used anymore. */ public void stop(boolean waitFor){ logger.log(Level.FINE, "Closing application: {0}", getClass().getName()); context.destroy(waitFor); } /** * Do not call manually. * Callback from ContextListener. * <p> * Initializes the <code>Application</code>, by creating a display and * default camera. If display settings are not specified, a default * 640x480 display is created. Default values are used for the camera; * perspective projection with 45° field of view, with near * and far values 1 and 1000 units respectively. */ public void initialize(){ if (assetManager == null){ initAssetManager(); } initDisplay(); initCamera(); if (inputEnabled){ initInput(); } initAudio(); initStateManager(); // update timer so that the next delta is not too large // timer.update(); timer.reset(); // user code here.. } /** * Internal use only. */ public void handleError(String errMsg, Throwable t){ logger.log(Level.SEVERE, errMsg, t); // user should add additional code to handle the error. stop(); // stop the application } /** * Internal use only. */ public void gainFocus(){ if (pauseOnFocus){ paused = false; context.setAutoFlushFrames(true); if (inputManager != null) inputManager.reset(); } } /** * Internal use only. 
*/ public void loseFocus(){ if (pauseOnFocus){ paused = true; context.setAutoFlushFrames(false); } } /** * Internal use only. */ public void requestClose(boolean esc){ context.destroy(false); } /** * Enqueues a task/callable object to execute in the jME3 * rendering thread. */ public <V> Future<V> enqueue(Callable<V> callable) { AppTask<V> task = new AppTask<V>(callable); taskQueue.add(task); return task; } /** * Do not call manually. * Callback from ContextListener. */ public void update(){ // Make sure the audio renderer is available to callables AudioContext.setAudioRenderer(audioRenderer); AppTask<?> task = taskQueue.poll(); toploop: do { if (task == null) break; while (task.isCancelled()) { task = taskQueue.poll(); if (task == null) break toploop; } task.invoke(); } while (((task = taskQueue.poll()) != null)); if (speed == 0 || paused) return; timer.update(); if (inputEnabled){ inputManager.update(timer.getTimePerFrame()); } if (audioRenderer != null){ audioRenderer.update(timer.getTimePerFrame()); } // user code here.. } protected void destroyInput(){ if (mouseInput != null) mouseInput.destroy(); if (keyInput != null) keyInput.destroy(); if (joyInput != null) joyInput.destroy(); if (touchInput != null) touchInput.destroy(); inputManager = null; } /** * Do not call manually. * Callback from ContextListener. */ public void destroy(){ stateManager.cleanup(); destroyInput(); if (audioRenderer != null) audioRenderer.cleanup(); timer.reset(); } public ViewPort getGuiViewPort() { return guiViewPort; } public ViewPort getViewPort() { return viewPort; } }
engine/src/core/com/jme3/app/Application.java
/* * Copyright (c) 2009-2010 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.jme3.app; import com.jme3.app.state.AppStateManager; import com.jme3.input.JoyInput; import com.jme3.input.KeyInput; import com.jme3.input.MouseInput; import com.jme3.input.TouchInput; import com.jme3.math.Vector3f; import com.jme3.renderer.Camera; import com.jme3.renderer.Renderer; import com.jme3.asset.AssetManager; import com.jme3.audio.AudioContext; import com.jme3.audio.AudioRenderer; import com.jme3.audio.Listener; import com.jme3.input.InputManager; import com.jme3.renderer.RenderManager; import com.jme3.renderer.ViewPort; import com.jme3.system.AppSettings; import com.jme3.system.JmeCanvasContext; import com.jme3.system.JmeContext; import java.net.MalformedURLException; import java.net.URL; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Future; import java.util.logging.Level; import java.util.logging.Logger; import com.jme3.system.JmeContext.Type; import com.jme3.system.JmeSystem; import com.jme3.system.SystemListener; import com.jme3.system.Timer; /** * The <code>Application</code> class represents an instance of a * real-time 3D rendering jME application. * * An <code>Application</code> provides all the tools that are commonly used in jME3 * applications. * * jME3 applications should extend this class and call start() to begin the * application. 
* */ public class Application implements SystemListener { private static final Logger logger = Logger.getLogger(Application.class.getName()); protected AssetManager assetManager; protected AudioRenderer audioRenderer; protected Renderer renderer; protected RenderManager renderManager; protected ViewPort viewPort; protected ViewPort guiViewPort; protected JmeContext context; protected AppSettings settings; protected Timer timer; protected Camera cam; protected Listener listener; protected boolean inputEnabled = true; protected boolean pauseOnFocus = true; protected float speed = 1f; protected boolean paused = false; protected MouseInput mouseInput; protected KeyInput keyInput; protected JoyInput joyInput; protected TouchInput touchInput; protected InputManager inputManager; protected AppStateManager stateManager; private final ConcurrentLinkedQueue<AppTask<?>> taskQueue = new ConcurrentLinkedQueue<AppTask<?>>(); /** * Create a new instance of <code>Application</code>. */ public Application(){ } /** * Returns true if pause on lost focus is enabled, false otherwise. * * @return true if pause on lost focus is enabled * * @see #setPauseOnLostFocus(boolean) */ public boolean isPauseOnLostFocus() { return pauseOnFocus; } /** * Enable or disable pause on lost focus. * <p> * By default, pause on lost focus is enabled. * If enabled, the application will stop updating * when it loses focus or becomes inactive (e.g. alt-tab). * For online or real-time applications, this might not be preferable, * so this feature should be set to disabled. For other applications, * it is best to keep it on so that CPU usage is not used when * not necessary. * * @param pauseOnLostFocus True to enable pause on lost focus, false * otherwise. 
*/ public void setPauseOnLostFocus(boolean pauseOnLostFocus) { this.pauseOnFocus = pauseOnLostFocus; } @Deprecated public void setAssetManager(AssetManager assetManager){ if (this.assetManager != null) throw new IllegalStateException("Can only set asset manager" + " before initialization."); this.assetManager = assetManager; } private void initAssetManager(){ if (settings != null){ String assetCfg = settings.getString("AssetConfigURL"); if (assetCfg != null){ URL url = null; try { url = new URL(assetCfg); } catch (MalformedURLException ex) { } if (url == null) { url = Application.class.getClassLoader().getResource(assetCfg); if (url == null) { logger.log(Level.SEVERE, "Unable to access AssetConfigURL in asset config:{0}", assetCfg); return; } } assetManager = JmeSystem.newAssetManager(url); } } if (assetManager == null){ assetManager = JmeSystem.newAssetManager( Thread.currentThread().getContextClassLoader() .getResource("com/jme3/asset/Desktop.cfg")); } } /** * Set the display settings to define the display created. Examples of * display parameters include display pixel width and height, * color bit depth, z-buffer bits, anti-aliasing samples, and update frequency. * * @param settings The settings to set. 
*/ public void setSettings(AppSettings settings){ this.settings = settings; if (context != null && settings.useInput() != inputEnabled){ // may need to create or destroy input based // on settings change inputEnabled = !inputEnabled; if (inputEnabled){ initInput(); }else{ destroyInput(); } }else{ inputEnabled = settings.useInput(); } } private void initDisplay(){ // aquire important objects // from the context settings = context.getSettings(); timer = context.getTimer(); renderer = context.getRenderer(); } private void initAudio(){ if (settings.getAudioRenderer() != null && context.getType() != Type.Headless){ audioRenderer = JmeSystem.newAudioRenderer(settings); audioRenderer.initialize(); AudioContext.setAudioRenderer(audioRenderer); listener = new Listener(); audioRenderer.setListener(listener); } } /** * Creates the camera to use for rendering. Default values are perspective * projection with 45° field of view, with near and far values 1 and 1000 * units respectively. */ private void initCamera(){ cam = new Camera(settings.getWidth(), settings.getHeight()); cam.setFrustumPerspective(45f, (float)cam.getWidth() / cam.getHeight(), 1f, 1000f); cam.setLocation(new Vector3f(0f, 0f, 10f)); cam.lookAt(new Vector3f(0f, 0f, 0f), Vector3f.UNIT_Y); renderManager = new RenderManager(renderer); //Remy - 09/14/2010 setted the timer in the renderManager renderManager.setTimer(timer); viewPort = renderManager.createMainView("Default", cam); viewPort.setClearFlags(true, true, true); // Create a new cam for the gui Camera guiCam = new Camera(settings.getWidth(), settings.getHeight()); guiViewPort = renderManager.createPostView("Gui Default", guiCam); guiViewPort.setClearFlags(false, false, false); } /** * Initializes mouse and keyboard input. Also * initializes joystick input if joysticks are enabled in the * AppSettings. 
*/ private void initInput(){ mouseInput = context.getMouseInput(); if (mouseInput != null) mouseInput.initialize(); keyInput = context.getKeyInput(); if (keyInput != null) keyInput.initialize(); touchInput = context.getTouchInput(); if (touchInput != null) touchInput.initialize(); if (!settings.getBoolean("DisableJoysticks")){ joyInput = context.getJoyInput(); if (joyInput != null) joyInput.initialize(); } inputManager = new InputManager(mouseInput, keyInput, joyInput, touchInput); } private void initStateManager(){ stateManager = new AppStateManager(this); } /** * @return The {@link AssetManager asset manager} for this application. */ public AssetManager getAssetManager(){ return assetManager; } /** * @return the {@link InputManager input manager}. */ public InputManager getInputManager(){ return inputManager; } /** * @return the {@link AppStateManager app state manager} */ public AppStateManager getStateManager() { return stateManager; } /** * @return the {@link RenderManager render manager} */ public RenderManager getRenderManager() { return renderManager; } /** * @return The {@link Renderer renderer} for the application */ public Renderer getRenderer(){ return renderer; } /** * @return The {@link AudioRenderer audio renderer} for the application */ public AudioRenderer getAudioRenderer() { return audioRenderer; } /** * @return The {@link Listener listener} object for audio */ public Listener getListener() { return listener; } /** * @return The {@link JmeContext display context} for the application */ public JmeContext getContext(){ return context; } /** * @return The {@link Camera camera} for the application */ public Camera getCamera(){ return cam; } /** * Starts the application in {@link Type#Display display} mode. * * @see #start(com.jme3.system.JmeContext.Type) */ public void start(){ start(JmeContext.Type.Display); } /** * Starts the application. * Creating a rendering context and executing * the main loop in a separate thread. 
*/ public void start(JmeContext.Type contextType){ if (context != null && context.isCreated()){ logger.warning("start() called when application already created!"); return; } if (settings == null){ settings = new AppSettings(true); } logger.log(Level.FINE, "Starting application: {0}", getClass().getName()); context = JmeSystem.newContext(settings, contextType); context.setSystemListener(this); context.create(false); } /** * Initializes the application's canvas for use. * <p> * After calling this method, cast the {@link #getContext() context} to * {@link JmeCanvasContext}, * then acquire the canvas with {@link JmeCanvasContext#getCanvas() } * and attach it to an AWT/Swing Frame. * The rendering thread will start when the canvas becomes visible on * screen, however if you wish to start the context immediately you * may call {@link #startCanvas() } to force the rendering thread * to start. * * @see JmeCanvasContext * @see Type#Canvas */ public void createCanvas(){ if (context != null && context.isCreated()){ logger.warning("createCanvas() called when application already created!"); return; } if (settings == null){ settings = new AppSettings(true); } logger.log(Level.FINE, "Starting application: {0}", getClass().getName()); context = JmeSystem.newContext(settings, JmeContext.Type.Canvas); context.setSystemListener(this); } /** * Starts the rendering thread after createCanvas() has been called. * <p> * Same as calling startCanvas(false) * * @see #startCanvas(boolean) */ public void startCanvas(){ startCanvas(false); } /** * Starts the rendering thread after createCanvas() has been called. * <p> * Calling this method is optional, the canvas will start automatically * when it becomes visible. * * @param waitFor If true, the current thread will block until the * rendering thread is running */ public void startCanvas(boolean waitFor){ context.create(waitFor); } /** * Internal use only. 
*/ public void reshape(int w, int h){ renderManager.notifyReshape(w, h); } /** * Restarts the context, applying any changed settings. * <p> * Changes to the {@link AppSettings} of this Application are not * applied immediately; calling this method forces the context * to restart, applying the new settings. */ public void restart(){ context.setSettings(settings); context.restart(); } /** * * Requests the context to close, shutting down the main loop * and making necessary cleanup operations. * * Same as calling stop(false) * * @see #stop(boolean) */ public void stop(){ stop(false); } /** * Requests the context to close, shutting down the main loop * and making necessary cleanup operations. * After the application has stopped, it cannot be used anymore. */ public void stop(boolean waitFor){ logger.log(Level.FINE, "Closing application: {0}", getClass().getName()); context.destroy(waitFor); } /** * Do not call manually. * Callback from ContextListener. * <p> * Initializes the <code>Application</code>, by creating a display and * default camera. If display settings are not specified, a default * 640x480 display is created. Default values are used for the camera; * perspective projection with 45° field of view, with near * and far values 1 and 1000 units respectively. */ public void initialize(){ if (assetManager == null){ initAssetManager(); } initDisplay(); initCamera(); if (inputEnabled){ initInput(); } initAudio(); initStateManager(); // update timer so that the next delta is not too large // timer.update(); timer.reset(); // user code here.. } /** * Internal use only. */ public void handleError(String errMsg, Throwable t){ logger.log(Level.SEVERE, errMsg, t); // user should add additional code to handle the error. stop(); // stop the application } /** * Internal use only. */ public void gainFocus(){ if (pauseOnFocus){ paused = false; context.setAutoFlushFrames(true); if (inputManager != null) inputManager.reset(); } } /** * Internal use only. 
*/ public void loseFocus(){ if (pauseOnFocus){ paused = true; context.setAutoFlushFrames(false); } } /** * Internal use only. */ public void requestClose(boolean esc){ context.destroy(false); } /** * Enqueues a task/callable object to execute in the jME3 * rendering thread. */ public <V> Future<V> enqueue(Callable<V> callable) { AppTask<V> task = new AppTask<V>(callable); taskQueue.add(task); return task; } /** * Do not call manually. * Callback from ContextListener. */ public void update(){ // Make sure the audio renderer is available to callables AudioContext.setAudioRenderer(audioRenderer); AppTask<?> task = taskQueue.poll(); toploop: do { if (task == null) break; while (task.isCancelled()) { task = taskQueue.poll(); if (task == null) break toploop; } task.invoke(); } while (((task = taskQueue.poll()) != null)); if (speed == 0 || paused) return; timer.update(); if (inputEnabled){ inputManager.update(timer.getTimePerFrame()); } if (audioRenderer != null){ audioRenderer.update(timer.getTimePerFrame()); } // user code here.. } protected void destroyInput(){ if (mouseInput != null) mouseInput.destroy(); if (keyInput != null) keyInput.destroy(); if (joyInput != null) joyInput.destroy(); if (touchInput != null) touchInput.destroy(); inputManager = null; } /** * Do not call manually. * Callback from ContextListener. */ public void destroy(){ stateManager.cleanup(); destroyInput(); if (audioRenderer != null) audioRenderer.cleanup(); timer.reset(); } public ViewPort getGuiViewPort() { return guiViewPort; } public ViewPort getViewPort() { return viewPort; } }
Added a setTimer() method for switching the default Timer implementation. git-svn-id: f9411aee4f13664f2fc428a5b3e824fe43a079a3@8523 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
engine/src/core/com/jme3/app/Application.java
Added a setTimer() method for switching the default Timer implementation.
<ide><path>ngine/src/core/com/jme3/app/Application.java <ide> } <ide> } <ide> <add> /** <add> * Sets the Timer implementation that will be used for calculating <add> * frame times. By default, Application will use the Timer as returned <add> * by the current JmeContext implementation. <add> */ <add> public void setTimer(Timer timer){ <add> this.timer = timer; <add> <add> if (timer != null) { <add> timer.reset(); <add> } <add> <add> if (renderManager != null) { <add> renderManager.setTimer(timer); <add> } <add> } <add> <ide> private void initDisplay(){ <ide> // aquire important objects <ide> // from the context <ide> settings = context.getSettings(); <del> timer = context.getTimer(); <add> <add> // Only reset the timer if a user has not already provided one <add> if (timer == null) { <add> timer = context.getTimer(); <add> } <ide> <ide> renderer = context.getRenderer(); <ide> }
Java
mit
69cb1cd176e26919371573aa19f4352931e2f841
0
Monaden/automatabuilder
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package automatabuilder; import interfaces.IState; import interfaces.ITransition; import java.util.Vector; import org.junit.*; import static org.junit.Assert.*; /** * * @author Adam */ public class StateTest { private Vector<ITransition> transitions; private IState q0; private IState q1; private Symbol a; @Before public void setUp() throws Exception { transitions = new Vector<>(); } private void buildStateWithOneTransitions(){ a = new Symbol("a"); q0 = new State(transitions, false, "q0"); q1 = new State(new Vector(), true, "q1"); Transition t1 = new Transition(q1, a); transitions.add(t1); } @Test public void testGetName() { State state = new State(transitions, true, "q0"); String expResult = "q0"; assertEquals(expResult, state.getName()); } @Test public void testIsFinal() { State state = new State(transitions, true, "q0"); assertEquals(true, state.isFinal()); } @Test public void testTransition() { buildStateWithOneTransitions(); IState result = q0.transition(a); assertEquals(q1, result); } @Test public void testToString() { buildStateWithOneTransitions(); assertEquals("q0:(a->q1)", q0.toString()); assertEquals("*q1:()",q1.toString()); } }
src/test/java/automatabuilder/StateTest.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package automatabuilder; import interfaces.IState; import interfaces.ITransition; import java.util.Vector; import org.junit.Test; import static org.junit.Assert.*; /** * * @author Adam */ public class StateTest { public StateTest() { } /** * Test of getName method, of class State. */ @org.junit.Test public void testGetName() { System.out.println("getName"); Vector<ITransition> ts = new Vector(); State instance = new State(ts, true, "q0"); String expResult = "q0"; String result = instance.getName(); assertEquals(expResult, result); } /** * Test of isFinal method, of class State. */ @org.junit.Test public void testIsFinal() { System.out.println("isFinal"); Vector<ITransition> ts = new Vector(); State instance = new State(ts, true, "q0"); boolean expResult = true; boolean result = instance.isFinal(); assertEquals(expResult, result); } /** * Test of transition method, of class State. */ @org.junit.Test public void testTransition() { System.out.println("transition"); Symbol a = new Symbol("a"); Vector<ITransition> ts = new Vector(); State q0 = new State(ts, true, "q0"); State q1 = new State(new Vector(), true, "q1"); Transition t1 = new Transition(q1, a); ts.add(t1); IState result = q0.transition(a); assertEquals(q1, result); } /** * Test of toString method, of class State. 
*/ @org.junit.Test public void testToString() { System.out.println("toString"); Symbol a = new Symbol("a"); Vector<ITransition> ts = new Vector(); State q0 = new State(ts, false, "q0"); State q1 = new State(new Vector(), true, "q1"); Transition t1 = new Transition(q1, a); ts.add(t1); String expResult1 = "q0:(a->q1)"; String expResult2 = "*q1:()"; String result1 = q0.toString(); String result2 = q1.toString(); System.out.println(result2); System.out.println(expResult2); assertEquals(expResult1, result1); assertEquals(expResult2, result2); } }
removed: comment as the code was self-explanatory that it is the State class which is under test, and the test methods describe the functionallity added: setUp method which is run before every test method. duplicate code could then be removed. added: a build method which builds a state with one state. duplicate code could then be removed from testTransitions and toString method.
src/test/java/automatabuilder/StateTest.java
removed: comment as the code was self-explanatory that it is the State class which is under test, and the test methods describe the functionallity
<ide><path>rc/test/java/automatabuilder/StateTest.java <ide> import interfaces.IState; <ide> import interfaces.ITransition; <ide> import java.util.Vector; <del>import org.junit.Test; <add>import org.junit.*; <add> <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide> * @author Adam <ide> */ <ide> public class StateTest { <del> <del> public StateTest() { <add> <add> private Vector<ITransition> transitions; <add> <add> private IState q0; <add> private IState q1; <add> private Symbol a; <add> <add> <add> @Before <add> public void setUp() throws Exception { <add> transitions = new Vector<>(); <ide> } <ide> <del> /** <del> * Test of getName method, of class State. <del> */ <del> @org.junit.Test <add> private void buildStateWithOneTransitions(){ <add> a = new Symbol("a"); <add> <add> q0 = new State(transitions, false, "q0"); <add> q1 = new State(new Vector(), true, "q1"); <add> <add> Transition t1 = new Transition(q1, a); <add> transitions.add(t1); <add> <add> } <add> <add> @Test <ide> public void testGetName() { <del> System.out.println("getName"); <del> Vector<ITransition> ts = new Vector(); <del> <del> State instance = new State(ts, true, "q0"); <add> State state = new State(transitions, true, "q0"); <ide> String expResult = "q0"; <ide> <del> String result = instance.getName(); <del> assertEquals(expResult, result); <add> assertEquals(expResult, state.getName()); <ide> } <ide> <del> /** <del> * Test of isFinal method, of class State. <del> */ <del> @org.junit.Test <add> @Test <ide> public void testIsFinal() { <del> <del> System.out.println("isFinal"); <del> <del> Vector<ITransition> ts = new Vector(); <del> <del> State instance = new State(ts, true, "q0"); <del> boolean expResult = true; <del> boolean result = instance.isFinal(); <del> assertEquals(expResult, result); <add> State state = new State(transitions, true, "q0"); <add> assertEquals(true, state.isFinal()); <ide> } <ide> <del> /** <del> * Test of transition method, of class State. 
<del> */ <del> @org.junit.Test <add> @Test <ide> public void testTransition() { <del> System.out.println("transition"); <del> Symbol a = new Symbol("a"); <del> Vector<ITransition> ts = new Vector(); <del> <del> State q0 = new State(ts, true, "q0"); <del> State q1 = new State(new Vector(), true, "q1"); <del> <del> Transition t1 = new Transition(q1, a); <del> ts.add(t1); <del> <add> buildStateWithOneTransitions(); <ide> IState result = q0.transition(a); <ide> assertEquals(q1, result); <ide> } <ide> <del> /** <del> * Test of toString method, of class State. <del> */ <del> @org.junit.Test <add> @Test <ide> public void testToString() { <del> System.out.println("toString"); <del> Symbol a = new Symbol("a"); <del> Vector<ITransition> ts = new Vector(); <del> <del> State q0 = new State(ts, false, "q0"); <del> State q1 = new State(new Vector(), true, "q1"); <del> <del> Transition t1 = new Transition(q1, a); <del> ts.add(t1); <del> <del> String expResult1 = "q0:(a->q1)"; <del> String expResult2 = "*q1:()"; <del> String result1 = q0.toString(); <del> String result2 = q1.toString(); <del> System.out.println(result2); <del> System.out.println(expResult2); <del> assertEquals(expResult1, result1); <del> assertEquals(expResult2, result2); <add> buildStateWithOneTransitions(); <add> assertEquals("q0:(a->q1)", q0.toString()); <add> assertEquals("*q1:()",q1.toString()); <ide> } <del> <add> <ide> }
Java
bsd-3-clause
47ba8558f280e1d9e43fa27080d1a5d2a7042b93
0
sebastiangraf/treetank,sebastiangraf/treetank,sebastiangraf/treetank
/** * */ package org.treetank.access; import static org.testng.AssertJUnit.assertEquals; import static org.treetank.CoreTestHelper.getFakedStructure; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import org.treetank.exception.TTIOException; import org.treetank.io.IBackendReader; /** * * Test-case for PageReadTrx. * * @author Sebastian Graf, University of Konstanz * */ public class PageReadTrxTest { /** * @throws java.lang.Exception */ @BeforeMethod public void setUp() throws Exception { } /** * @throws java.lang.Exception */ @AfterMethod public void tearDown() throws Exception { } /** * Test method for {@link org.treetank.access.PageReadTrx#getNode(long)}. */ @Test public void testGetNode() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#close()}. */ @Test public void testClose() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#getActualRevisionRootPage()}. */ @Test public void testGetActualRevisionRootPage() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#isClosed()}. */ @Test public void testIsClosed() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#checkItemIfDeleted(org.treetank.api.INode)}. */ @Test public void testCheckItemIfDeleted() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#getSnapshotPages(long)}. */ @Test public void testGetSnapshotPages() { // fail("Not yet implemented"); } /** * Test method for * {@link org.treetank.access.PageReadTrx#dereferenceLeafOfTree(org.treetank.io.IBackendReader, long, long)} * . 
*/ @Test public void testDereferenceLeafOfTree() throws TTIOException { int[] offsets = new int[5]; IBackendReader reader = getFakedStructure(offsets); long key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 0); // 6 is base key because of 5 layers plus the 1 as start key assertEquals(6, key); offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 127); // 6 as base plus 127 as offset on last page assertEquals(133, key); offsets[3] = 1; offsets[4] = 0; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 128); // 6 as base plus one additional offset on one level above assertEquals(7, key); offsets[3] = 127; offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 16383); // 6 as base plus two times 127 as offsets on level above assertEquals(260, key); offsets[2] = 1; offsets[3] = 0; offsets[4] = 0; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 16384); // 6 as base plus one additional offset on two levels above assertEquals(7, key); offsets[2] = 127; offsets[3] = 127; offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 2097151); // 6 as base plus three times 127 as offsets on levels above assertEquals(387, key); offsets[1] = 1; offsets[2] = 0; offsets[3] = 0; offsets[4] = 0; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 2097152); // 6 as base plus one additional offset on three levels above assertEquals(7, key); offsets[1] = 127; offsets[2] = 127; offsets[3] = 127; offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 268435455); // 6 as base plus four times 127 as offsets on levels above assertEquals(514, key); offsets[0] = 1; offsets[1] = 0; offsets[2] = 0; offsets[3] = 0; offsets[4] = 0; reader = getFakedStructure(offsets); key = 
PageReadTrx.dereferenceLeafOfTree(reader, 1, 268435456); // 6 as base plus one additional offset on three levels above assertEquals(7, key); offsets[0] = 127; offsets[1] = 127; offsets[2] = 127; offsets[3] = 127; offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 34359738367l); // 6 as base plus five times 127 as offsets on levels above assertEquals(641, key); // false offset, not existing offsets[0] = 0; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 34359738367l); assertEquals(-1, key); } /** * Test method for {@link org.treetank.access.PageReadTrx#nodePageKey(long)}. */ @Test public void testNodePageKey() { assertEquals(0, PageReadTrx.nodePageKey(0)); assertEquals(1, PageReadTrx.nodePageKey(128)); assertEquals(127, PageReadTrx.nodePageKey(16383)); } /** * Test method for {@link org.treetank.access.PageReadTrx#nodePageOffset(long)}. */ @Test public void testNodePageOffset() { assertEquals(0, PageReadTrx.nodePageKey(0)); assertEquals(1, PageReadTrx.nodePageKey(128)); assertEquals(127, PageReadTrx.nodePageKey(16383)); } /** * Test method for {@link org.treetank.access.PageReadTrx#getMetaPage()}. */ @Test public void testGetMetaPage() { assertEquals(0, PageReadTrx.nodePageOffset(0)); assertEquals(127, PageReadTrx.nodePageOffset(127)); assertEquals(0, PageReadTrx.nodePageOffset(128)); assertEquals(127, PageReadTrx.nodePageOffset(16383)); } }
coremodules/core/src/test/java/org/treetank/access/PageReadTrxTest.java
/** * */ package org.treetank.access; import static org.testng.AssertJUnit.assertEquals; import static org.treetank.CoreTestHelper.getFakedStructure; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import org.treetank.exception.TTIOException; import org.treetank.io.IBackendReader; /** * * Test-case for PageReadTrx. * * @author Sebastian Graf, University of Konstanz * */ public class PageReadTrxTest { /** * @throws java.lang.Exception */ @BeforeMethod public void setUp() throws Exception { } /** * @throws java.lang.Exception */ @AfterMethod public void tearDown() throws Exception { } /** * Test method for {@link org.treetank.access.PageReadTrx#getNode(long)}. */ @Test public void testGetNode() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#close()}. */ @Test public void testClose() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#getActualRevisionRootPage()}. */ @Test public void testGetActualRevisionRootPage() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#isClosed()}. */ @Test public void testIsClosed() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#checkItemIfDeleted(org.treetank.api.INode)}. */ @Test public void testCheckItemIfDeleted() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#getSnapshotPages(long)}. */ @Test public void testGetSnapshotPages() { // fail("Not yet implemented"); } /** * Test method for * {@link org.treetank.access.PageReadTrx#dereferenceLeafOfTree(org.treetank.io.IBackendReader, long, long)} * . 
*/ @Test public void testDereferenceLeafOfTree() throws TTIOException { int[] offsets = new int[5]; IBackendReader reader = getFakedStructure(offsets); long key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 0); // 6 is base key because of 5 layers plus the 1 as start key assertEquals(6, key); offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 127); // 6 as base plus 127 as offset on last page assertEquals(133, key); offsets[3] = 1; offsets[4] = 0; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 128); // 6 as base plus one additional offset on one level above assertEquals(7, key); offsets[3] = 127; offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 16383); // 6 as base plus two times 127 as offsets on level above assertEquals(260, key); offsets[2] = 1; offsets[3] = 0; offsets[4] = 0; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 16384); // 6 as base plus one additional offset on two levels above assertEquals(7, key); offsets[2] = 127; offsets[3] = 127; offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 2097151); // 6 as base plus three times 127 as offsets on levels above assertEquals(387, key); offsets[1] = 1; offsets[2] = 0; offsets[3] = 0; offsets[4] = 0; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 2097152); // 6 as base plus one additional offset on three levels above assertEquals(7, key); offsets[1] = 127; offsets[2] = 127; offsets[3] = 127; offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 268435455); // 6 as base plus four times 127 as offsets on levels above assertEquals(514, key); offsets[0] = 1; offsets[1] = 0; offsets[2] = 0; offsets[3] = 0; offsets[4] = 0; reader = getFakedStructure(offsets); key = 
PageReadTrx.dereferenceLeafOfTree(reader, 1, 268435456); // 6 as base plus one additional offset on three levels above assertEquals(7, key); offsets[0] = 127; offsets[1] = 127; offsets[2] = 127; offsets[3] = 127; offsets[4] = 127; reader = getFakedStructure(offsets); key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 34359738367l); // 6 as base plus five times 127 as offsets on levels above assertEquals(641, key); } /** * Test method for {@link org.treetank.access.PageReadTrx#nodePageKey(long)}. */ @Test public void testNodePageKey() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#nodePageOffset(long)}. */ @Test public void testNodePageOffset() { // fail("Not yet implemented"); } /** * Test method for {@link org.treetank.access.PageReadTrx#getMetaPage()}. */ @Test public void testGetMetaPage() { // fail("Not yet implemented"); } }
[MOD] nodepageoff and nodepagekey-methods tested git-svn-id: a5379eb5ca3beb2b6e029be3b1b7f6aa53f2352b@7160 e3ddb328-5bfe-0310-b762-aafcbcbd2528
coremodules/core/src/test/java/org/treetank/access/PageReadTrxTest.java
[MOD] nodepageoff and nodepagekey-methods tested
<ide><path>oremodules/core/src/test/java/org/treetank/access/PageReadTrxTest.java <ide> // 6 as base plus five times 127 as offsets on levels above <ide> assertEquals(641, key); <ide> <add> // false offset, not existing <add> offsets[0] = 0; <add> reader = getFakedStructure(offsets); <add> key = PageReadTrx.dereferenceLeafOfTree(reader, 1, 34359738367l); <add> assertEquals(-1, key); <ide> } <ide> <ide> /** <ide> */ <ide> @Test <ide> public void testNodePageKey() { <del> // fail("Not yet implemented"); <add> assertEquals(0, PageReadTrx.nodePageKey(0)); <add> assertEquals(1, PageReadTrx.nodePageKey(128)); <add> assertEquals(127, PageReadTrx.nodePageKey(16383)); <ide> } <ide> <ide> /** <ide> */ <ide> @Test <ide> public void testNodePageOffset() { <del> // fail("Not yet implemented"); <add> assertEquals(0, PageReadTrx.nodePageKey(0)); <add> assertEquals(1, PageReadTrx.nodePageKey(128)); <add> assertEquals(127, PageReadTrx.nodePageKey(16383)); <ide> } <ide> <ide> /** <ide> */ <ide> @Test <ide> public void testGetMetaPage() { <del> // fail("Not yet implemented"); <add> assertEquals(0, PageReadTrx.nodePageOffset(0)); <add> assertEquals(127, PageReadTrx.nodePageOffset(127)); <add> assertEquals(0, PageReadTrx.nodePageOffset(128)); <add> assertEquals(127, PageReadTrx.nodePageOffset(16383)); <ide> } <ide> <ide> }
Java
unlicense
ea2cec086b2eac36e1dcd98bad7b0bce43e560f1
0
ggabriel96/binary_search_tree,ggabriel96/red_black_tree
/* * This file is part of binary_search_tree. * * binary_search_tree is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * binary_search_tree is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with binary_search_tree. If not, see <http://www.gnu.org/licenses/>. */ class Tree { Node root; public Tree(int k) { this.root = new Node(k); root.p = root.l = root.r = null; } public Node find(int k) { if (k < this.root.k && this.root.l != null) { return this.root.l.find(k); } else if (k > this.root.k && this.root.l != null) { return this.root.r.find(k); } else return this.root; } public void add(int k) { Node n = this.root.find(k); if (k < n.k) { n.l = new Node(k); n.l.p = n; } else if (k > n.k) { n.r = new Node(k); n.r.p = n; } } public void remove(Node u) { /* This first 'if' treats the case when u has * no children *or* has a right child, but not left. */ if (u.l == null) this.transplant(u, u.r); else if (u.r == null) this.transplant(u, u.l); else { Node v = u.successor(); /* If u has two children, gotta transplant it * with its successor (v). If v is not a child * of u, makes v.r be the new child of v.p. * Then, v.r receives u's right sub-tree. */ if (v.p != u) { this.transplant(v, v.r); v.r = u.r; v.r.p = v; } /* Now, v has already taken u's place, * except for the last step: */ this.transplant(u, v); v.l = u.l; v.l.p = v; /* transplant u and v so that we set the * new parent of v (old u.p). Now, there * is not a single reference to u and then * it will be freed by the garbage collector * (u.l and u.r are intact. 
That doesn't matter). * If v is a child of u (the right one), then * only the last step is performed. * Important: v.l is null, since v is u's successor. */ } } /* Adjusts v's references to match u's: * u.p.x = v and v.p = u.p (if v is not null). * Doesn't touch u.p, u.l and u.r. u is * still there as though nothing happened. */ private void transplant(Node u, Node v) { if (u.p == null) this.root = v; else if (u == u.p.l) u.p.l = v; else u.p.r = v; if (v != null) v.p = u.p; } public Node min() { return this.root.min(); } public Node max() { return this.root.max(); } public int size() { return this.root.size(); } public int depth() { return this.height(); } public int height() { return this.root.height(); } public void inorderWalk() { this.root.inorderWalk(); } }
src/Tree.java
/* * This file is part of binary_search_tree. * * binary_search_tree is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * binary_search_tree is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with binary_search_tree. If not, see <http://www.gnu.org/licenses/>. */ class Tree { Node root; public Tree(int k) { this.root = new Node(k); root.p = root.l = root.r = null; } public Node find(int k) { if (k < this.root.k && this.root.l != null) { return this.root.l.find(k); } else if (k > this.root.k && this.root.l != null) { return this.root.r.find(k); } else return this.root; } public void add(int k) { Node n = this.root.find(k); if (k < n.k) { n.l = new Node(k); n.l.p = n; } else if (k > n.k) { n.r = new Node(k); n.r.p = n; } } public void remove(Node u) { /* * This first 'if' treats the case when the node has * no children *or* has a right child but not left */ if (u.l == null) this.transplant(u, u.r); else if (u.r == null) this.transplant(u, u.l); else { Node v = u.successor(); /* * If u has two children, gotta transplant it * with its successor (v). If v is not a child * of u, makes v.r be the new child of v.p. * Then, v.r receives u's right sub-tree. */ if (v.p != u) { this.transplant(v, v.r); v.r = u.r; v.r.p = v; } /* * Now, v has already taken u's place, * except for the last step: */ this.transplant(u, v); v.l = u.l; v.l.p = v; /* transplant u and v so that * we set the new parent of v (old u.p). 
* Now, there is not a single reference * to u and then it will be freed by the * garbage collector (u.l and u.r are intact. * That doesn't matter). If v is a child of * u (the right one), then only the last step * is performed: u.p receives v as its new * child and u.l sub-tree becomes v.l sub-tree * (v.l is null, since v is u's successor) */ } } /* * Adjusts v's references to match u's: * u.p.x = v and v.p = u.p, if v is not null. * Doesn't touch u.p, u.l and u.r. u is * still there as if nothing happened. */ private void transplant(Node u, Node v) { if (u.p == null) this.root = v; else if (u == u.p.l) u.p.l = v; else u.p.r = v; if (v != null) v.p = u.p; } public Node min() { return this.root.min(); } public Node max() { return this.root.max(); } public int size() { return this.root.size(); } public int depth() { return this.height(); } public int height() { return this.root.height(); } public void inorderWalk() { this.root.inorderWalk(); } }
Little adjustments to the remove() commentary.
src/Tree.java
Little adjustments to the remove() commentary.
<ide><path>rc/Tree.java <ide> } <ide> <ide> public void remove(Node u) { <del> /* <del> * This first 'if' treats the case when the node has <del> * no children *or* has a right child but not left <add> /* This first 'if' treats the case when u has <add> * no children *or* has a right child, but not left. <ide> */ <ide> if (u.l == null) this.transplant(u, u.r); <ide> else if (u.r == null) this.transplant(u, u.l); <ide> else { <ide> Node v = u.successor(); <del> /* <del> * If u has two children, gotta transplant it <add> /* If u has two children, gotta transplant it <ide> * with its successor (v). If v is not a child <ide> * of u, makes v.r be the new child of v.p. <ide> * Then, v.r receives u's right sub-tree. <ide> v.r = u.r; <ide> v.r.p = v; <ide> } <del> /* <del> * Now, v has already taken u's place, <add> /* Now, v has already taken u's place, <ide> * except for the last step: <ide> */ <ide> this.transplant(u, v); <ide> v.l = u.l; <ide> v.l.p = v; <del> /* transplant u and v so that <del> * we set the new parent of v (old u.p). <del> * Now, there is not a single reference <del> * to u and then it will be freed by the <del> * garbage collector (u.l and u.r are intact. <del> * That doesn't matter). If v is a child of <del> * u (the right one), then only the last step <del> * is performed: u.p receives v as its new <del> * child and u.l sub-tree becomes v.l sub-tree <del> * (v.l is null, since v is u's successor) <add> /* transplant u and v so that we set the <add> * new parent of v (old u.p). Now, there <add> * is not a single reference to u and then <add> * it will be freed by the garbage collector <add> * (u.l and u.r are intact. That doesn't matter). <add> * If v is a child of u (the right one), then <add> * only the last step is performed. <add> * Important: v.l is null, since v is u's successor. <ide> */ <ide> } <ide> } <ide> <del> /* <del> * Adjusts v's references to match u's: <del> * u.p.x = v and v.p = u.p, if v is not null. 
<add> /* Adjusts v's references to match u's: <add> * u.p.x = v and v.p = u.p (if v is not null). <ide> * Doesn't touch u.p, u.l and u.r. u is <del> * still there as if nothing happened. <add> * still there as though nothing happened. <ide> */ <ide> private void transplant(Node u, Node v) { <ide> if (u.p == null) this.root = v;
JavaScript
mit
4f1bb001585d99f2bfe344c43f8ce5c82ffcf013
0
infinitered/ignite,infinitered/ignite,infinitered/ignite,infinitered/ignite,ruddell/ignite,ruddell/ignite,infinitered/ignite,infinitered/ignite,infinitered/ignite,lukabers/ignite,lukabers/ignite,infinitered/ignite,lukabers/ignite,lukabers/ignite,ruddell/ignite,lukabers/ignite
import R from 'ramda' import DebugSettings from '../Config/DebugSettings' let globalExamplesRegistry = [] export const addExample = (renderExampleFunc) => { if (DebugSettings.includeExamples) globalExamplesRegistry.push(renderExampleFunc) } const renderExample = (example) => example.call() export const renderExamples = () => R.map(renderExample, globalExamplesRegistry) // Default for readability export default { render: renderExamples, add: addExample }
ignite-base/App/Services/ExamplesRegistry.js
import React, {View} from 'react-native' import R from 'ramda' import DebugSettings from '../Config/DebugSettings' let globalExamplesRegistry = [] export const addExample = (renderExampleFunc) => { if (DebugSettings.includeExamples) globalExamplesRegistry.push(renderExampleFunc) } const renderExample = (example) => example.call() export const renderExamples = () => ( <View> {R.map(renderExample, globalExamplesRegistry)} </View> ) // Default for readability export default { render: renderExamples, add: addExample }
simplify render all
ignite-base/App/Services/ExamplesRegistry.js
simplify render all
<ide><path>gnite-base/App/Services/ExamplesRegistry.js <del>import React, {View} from 'react-native' <ide> import R from 'ramda' <ide> import DebugSettings from '../Config/DebugSettings' <ide> let globalExamplesRegistry = [] <ide> <ide> const renderExample = (example) => example.call() <ide> <del>export const renderExamples = () => ( <del> <View> <del> {R.map(renderExample, globalExamplesRegistry)} <del> </View> <del>) <add>export const renderExamples = () => R.map(renderExample, globalExamplesRegistry) <ide> <ide> // Default for readability <ide> export default {
JavaScript
agpl-3.0
c2e49d0b03234113672792c57d7f8b84fcdcda02
0
eventql/eventql,eventql/eventql,eventql/eventql,eventql/eventql,eventql/eventql,eventql/eventql,eventql/eventql,eventql/eventql
ZBase.registerView((function() { var kPathPrefix = "/a/reports/"; var docsync; var loadReport = function(params) { var query_id = params.path.substr(kPathPrefix.length); $.showLoader(); $.httpGet("/api/v1/documents/" + query_id, function(r) { if (r.status == 200) { var doc = JSON.parse(r.response); renderReport(doc); $.hideLoader(); } else { $.fatalError(); } }); }; var renderReport = function(doc) { var readonly = !doc.is_writable; var page = $.getTemplate( "views/report", "zbase_report_main_tpl"); if (!readonly) { // setup docsync docsync = DocSync( getDocument, "/api/v1/documents/" + doc.uuid, $(".zbase_report_infobar", page)); } //report name $(".report_name", page).innerHTML = doc.name; var report_title = $(".zbase_report_pane .report_name", page) report_title.innerHTML = doc.name; if (!readonly) { report_title.classList.add("editable"); initNameEditModal(); } //stub doc.content = { description: "Top On-Site Search Terms per Language that were referred by Google" }; //report description $(".zbase_report_pane .report_description", page).innerHTML = doc.content.description; $.handleLinks(page); $.replaceViewport(page); }; var initNameEditModal = function() { }; var getDocument = function() { }; var destroy = function() { if (docsync) { docsync.close(); } }; return { name: "report", loadView: loadReport, unloadView: destroy, handleNavigationChange: loadReport }; })());
src/zbase/webui/views/report/report.js
ZBase.registerView((function() { var kPathPrefix = "/a/reports/"; var doc_sync; var loadReport = function(params) { var query_id = params.path.substr(kPathPrefix.length); $.showLoader(); $.httpGet("/api/v1/documents/" + query_id, function(r) { if (r.status == 200) { var doc = JSON.parse(r.response); renderReport(doc); $.hideLoader(); } else { $.fatalError(); } }); }; var renderReport = function(doc) { var readonly = !doc.is_writable; var page = $.getTemplate( "views/report", "zbase_report_main_tpl"); if (!readonly) { // setup docsync docsync = DocSync( getDocument, "/api/v1/documents/" + doc.uuid, $(".zbase_report_infobar", page)); } //report name $(".report_name", page).innerHTML = doc.name; var report_title = $(".zbase_report_pane .report_name", page) report_title.innerHTML = doc.name; if (!readonly) { report_title.classList.add("editable"); initNameEditModal(); } //stub doc.content = { description: "Top On-Site Search Terms per Language that were referred by Google" }; //report description $(".zbase_report_pane .report_description", page).innerHTML = doc.content.description; $.handleLinks(page); $.replaceViewport(page); }; var initNameEditModal = function() { }; return { name: "report", loadView: loadReport, unloadView: function() {}, handleNavigationChange: loadReport }; })());
init docsync
src/zbase/webui/views/report/report.js
init docsync
<ide><path>rc/zbase/webui/views/report/report.js <ide> ZBase.registerView((function() { <ide> var kPathPrefix = "/a/reports/"; <del> var doc_sync; <add> <add> var docsync; <ide> <ide> var loadReport = function(params) { <ide> var query_id = params.path.substr(kPathPrefix.length); <ide> <ide> }; <ide> <add> <add> var getDocument = function() { <add> <add> }; <add> <add> <add> var destroy = function() { <add> if (docsync) { <add> docsync.close(); <add> } <add> }; <add> <ide> return { <ide> name: "report", <ide> loadView: loadReport, <del> unloadView: function() {}, <add> unloadView: destroy, <ide> handleNavigationChange: loadReport <ide> }; <ide>
Java
mit
44e4253e996e327853c22076c5c1374d83df735b
0
dmeybohm/organize-php-imports-plugin
package com.daveme.intellij.organizephpimports; import com.intellij.lang.LanguageImportStatements; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.project.Project; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.util.PsiUtilCore; import com.jetbrains.php.lang.psi.PhpFile; import org.jetbrains.annotations.NotNull; import java.util.ArrayList; public class OrganizeImportsAction extends AnAction { @Override public void update(@NotNull AnActionEvent e) { super.update(e); final VirtualFile[] virtualFiles = e.getData(CommonDataKeys.VIRTUAL_FILE_ARRAY); final Project project = e.getData(CommonDataKeys.PROJECT); final PsiFile[] psiFiles = convertToPsiFiles(virtualFiles, project); boolean visible = containsAtLeastOnePhpFile(psiFiles); boolean enabled = visible && hasImportStatements(psiFiles); e.getPresentation().setVisible(visible); e.getPresentation().setEnabled(enabled); } @Override public void actionPerformed(@NotNull AnActionEvent e) { final Project project = e.getData(CommonDataKeys.PROJECT); final VirtualFile[] virtualFiles = e.getData(CommonDataKeys.VIRTUAL_FILE_ARRAY); final PsiFile[] psiFiles = convertToPsiFiles(virtualFiles, project); new OrganizeImportsProcessor(project, psiFiles).execute(); } private static boolean hasImportStatements(final PsiFile[] files) { // @todo account for multiple projects? 
for (PsiFile file : files) { if (!LanguageImportStatements.INSTANCE.forFile(file).isEmpty()) { return true; } } return false; } private static boolean containsAtLeastOnePhpFile(final PsiFile[] files) { if (files == null) return false; if (files.length < 1) return false; for (PsiFile file : files) { if (file.getVirtualFile().isDirectory()) continue; if (!(file instanceof PhpFile)) continue; return true; } return false; } private static PsiFile[] convertToPsiFiles(final VirtualFile[] files, Project project) { final PsiManager manager = PsiManager.getInstance(project); final ArrayList<PsiFile> result = new ArrayList<PsiFile>(); for (VirtualFile virtualFile : files) { final PsiFile psiFile = manager.findFile(virtualFile); if (psiFile instanceof PhpFile) result.add(psiFile); } return PsiUtilCore.toPsiFileArray(result); } }
src/com/daveme/intellij/organizephpimports/OrganizeImportsAction.java
package com.daveme.intellij.organizephpimports; import com.intellij.lang.LanguageImportStatements; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.project.Project; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.util.PsiUtilCore; import com.jetbrains.php.lang.psi.PhpFile; import org.jetbrains.annotations.NotNull; import java.util.ArrayList; public class OrganizeImportsAction extends AnAction { @Override public void update(@NotNull AnActionEvent e) { super.update(e); final VirtualFile[] virtualFiles = e.getData(CommonDataKeys.VIRTUAL_FILE_ARRAY); final Project project = e.getData(CommonDataKeys.PROJECT); final PsiFile[] psiFiles = convertToPsiFiles(virtualFiles, project); boolean visible = containsAtLeastOnePhpFile(psiFiles); boolean enabled = visible && hasImportStatements(psiFiles, project); e.getPresentation().setVisible(visible); e.getPresentation().setEnabled(enabled); } @Override public void actionPerformed(@NotNull AnActionEvent e) { final Project project = e.getData(CommonDataKeys.PROJECT); final VirtualFile[] virtualFiles = e.getData(CommonDataKeys.VIRTUAL_FILE_ARRAY); final PsiFile[] psiFiles = convertToPsiFiles(virtualFiles, project); new OrganizeImportsProcessor(project, psiFiles).execute(); } private static boolean hasImportStatements(final PsiFile[] files, final Project project) { // @todo account for multiple projects? 
for (PsiFile file : files) { if (!LanguageImportStatements.INSTANCE.forFile(file).isEmpty()) { return true; } } return false; } private static boolean containsAtLeastOnePhpFile(final PsiFile[] files) { if (files == null) return false; if (files.length < 1) return false; for (PsiFile file : files) { if (file.getVirtualFile().isDirectory()) continue; if (!(file instanceof PhpFile)) continue; return true; } return false; } private static PsiFile[] convertToPsiFiles(final VirtualFile[] files, Project project) { final PsiManager manager = PsiManager.getInstance(project); final ArrayList<PsiFile> result = new ArrayList<PsiFile>(); for (VirtualFile virtualFile : files) { final PsiFile psiFile = manager.findFile(virtualFile); if (psiFile instanceof PhpFile) result.add(psiFile); } return PsiUtilCore.toPsiFileArray(result); } }
Delete unused var
src/com/daveme/intellij/organizephpimports/OrganizeImportsAction.java
Delete unused var
<ide><path>rc/com/daveme/intellij/organizephpimports/OrganizeImportsAction.java <ide> final PsiFile[] psiFiles = convertToPsiFiles(virtualFiles, project); <ide> <ide> boolean visible = containsAtLeastOnePhpFile(psiFiles); <del> boolean enabled = visible && hasImportStatements(psiFiles, project); <add> boolean enabled = visible && hasImportStatements(psiFiles); <ide> e.getPresentation().setVisible(visible); <ide> e.getPresentation().setEnabled(enabled); <ide> } <ide> new OrganizeImportsProcessor(project, psiFiles).execute(); <ide> } <ide> <del> private static boolean hasImportStatements(final PsiFile[] files, final Project project) { <add> private static boolean hasImportStatements(final PsiFile[] files) { <ide> // @todo account for multiple projects? <ide> for (PsiFile file : files) { <ide> if (!LanguageImportStatements.INSTANCE.forFile(file).isEmpty()) {
Java
apache-2.0
6d1a8dd78727e55b3d19a22279a83a3d9dcd01b3
0
ceylon/ceylon-js,ceylon/ceylon-js,ceylon/ceylon-js
package com.redhat.ceylon.compiler.js; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; import org.antlr.runtime.CommonToken; import com.redhat.ceylon.compiler.typechecker.analyzer.AnalysisWarning; import com.redhat.ceylon.compiler.typechecker.model.Class; import com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface; import com.redhat.ceylon.compiler.typechecker.model.Declaration; import com.redhat.ceylon.compiler.typechecker.model.Functional; import com.redhat.ceylon.compiler.typechecker.model.Getter; import com.redhat.ceylon.compiler.typechecker.model.ImportableScope; import com.redhat.ceylon.compiler.typechecker.model.Interface; import com.redhat.ceylon.compiler.typechecker.model.InterfaceAlias; import com.redhat.ceylon.compiler.typechecker.model.Method; import com.redhat.ceylon.compiler.typechecker.model.MethodOrValue; import com.redhat.ceylon.compiler.typechecker.model.Module; import com.redhat.ceylon.compiler.typechecker.model.Package; import com.redhat.ceylon.compiler.typechecker.model.ProducedType; import com.redhat.ceylon.compiler.typechecker.model.Scope; import com.redhat.ceylon.compiler.typechecker.model.Setter; import com.redhat.ceylon.compiler.typechecker.model.Specification; import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration; import com.redhat.ceylon.compiler.typechecker.model.TypeParameter; import com.redhat.ceylon.compiler.typechecker.model.Util; import com.redhat.ceylon.compiler.typechecker.model.Value; import com.redhat.ceylon.compiler.typechecker.tree.*; import com.redhat.ceylon.compiler.typechecker.tree.Tree.PositionalArgument; import com.redhat.ceylon.compiler.typechecker.tree.Tree.*; public class GenerateJsVisitor extends Visitor implements NaturalVisitor { private boolean indent=true; private boolean comment=true; private boolean verbose=false; 
private final Stack<Continuation> continues = new Stack<Continuation>(); private final EnclosingFunctionVisitor encloser = new EnclosingFunctionVisitor(); private final JsIdentifierNames names; private final Set<Declaration> directAccess = new HashSet<Declaration>(); private final RetainedVars retainedVars = new RetainedVars(); private final Map<String, String> importedModules; final ConditionGenerator conds; private final InvocationGenerator invoker; private final List<CommonToken> tokens; private final class SuperVisitor extends Visitor { private final List<Declaration> decs; private SuperVisitor(List<Declaration> decs) { this.decs = decs; } @Override public void visit(QualifiedMemberOrTypeExpression qe) { if (qe.getPrimary() instanceof Super) { decs.add(qe.getDeclaration()); } super.visit(qe); } @Override public void visit(BaseMemberOrTypeExpression that) { if (that.getSupertypeQualifier() != null) { decs.add(that.getDeclaration()); } super.visit(that); } @Override public void visit(QualifiedType that) { if (that.getOuterType() instanceof SuperType) { decs.add(that.getDeclarationModel()); } super.visit(that); } public void visit(Tree.ClassOrInterface qe) { //don't recurse if (qe instanceof ClassDefinition) { ExtendedType extType = ((ClassDefinition) qe).getExtendedType(); if (extType != null) { super.visit(extType); } } } } private final class OuterVisitor extends Visitor { boolean found = false; private Declaration dec; private OuterVisitor(Declaration dec) { this.dec = dec; } @Override public void visit(QualifiedMemberOrTypeExpression qe) { if (qe.getPrimary() instanceof Outer || qe.getPrimary() instanceof This) { if ( qe.getDeclaration().equals(dec) ) { found = true; } } super.visit(qe); } } private final TypeUtils types; private final Writer out; private final boolean prototypeStyle; private CompilationUnit root; private static String clAlias=""; private static final String function="function "; private boolean needIndent = true; private int indentLevel = 0; 
private static void setCLAlias(String alias) { clAlias = alias + "."; } /** Returns the module name for the language module. */ static String getClAlias() { return clAlias; } @Override public void handleException(Exception e, Node that) { that.addUnexpectedError(that.getMessage(e, this)); } public GenerateJsVisitor(Writer out, boolean prototypeStyle, JsIdentifierNames names, List<CommonToken> tokens, Map<String,String> imports, TypeUtils typeUtils) { this.out = out; this.prototypeStyle=prototypeStyle; this.names = names; conds = new ConditionGenerator(this, names, directAccess); this.tokens = tokens; importedModules = imports; types = typeUtils; invoker = new InvocationGenerator(this, names, retainedVars); } TypeUtils getTypeUtils() { return types; } /** Tells the receiver whether to add comments to certain declarations. Default is true. */ public void setAddComments(boolean flag) { comment = flag; } public boolean isAddComments() { return comment; } /** Tells the receiver whether to indent the generated code. Default is true. */ public void setIndent(boolean flag) { indent = flag; } /** Tells the receiver to be verbose (prints generated code to STDOUT in addition to writer) */ public void setVerbose(boolean flag) { verbose = flag; } /** Returns the helper component to handle naming. */ JsIdentifierNames getNames() { return names; } /** Print generated code to the Writer specified at creation time. * Automatically prints indentation first if necessary. * @param code The main code * @param codez Optional additional strings to print after the main code. */ void out(String code, String... codez) { try { if (indent && needIndent) { for (int i=0;i<indentLevel;i++) { out.write(" "); } } needIndent = false; out.write(code); for (String s : codez) { out.write(s); } if (verbose) { System.out.print(code); for (String s : codez) { System.out.print(s); } } } catch (IOException ioe) { throw new RuntimeException("Generating JS code", ioe); } } /** Prints a newline. 
Indentation will automatically be printed by {@link #out(String, String...)} * when the next line is started. */ void endLine() { endLine(false); } /** Prints a newline. Indentation will automatically be printed by {@link #out(String, String...)} * when the next line is started. * @param semicolon if <code>true</code> then a semicolon is printed at the end * of the previous line*/ void endLine(boolean semicolon) { if (semicolon) { out(";"); } out("\n"); needIndent = true; } /** Calls {@link #endLine()} if the current position is not already the beginning * of a line. */ void beginNewLine() { if (!needIndent) { endLine(); } } /** Increases indentation level, prints opening brace and newline. Indentation will * automatically be printed by {@link #out(String, String...)} when the next line is started. */ void beginBlock() { indentLevel++; out("{"); endLine(); } /** Decreases indentation level, prints a closing brace in new line (using * {@link #beginNewLine()}) and calls {@link #endLine()}. */ void endBlockNewLine() { endBlock(false, true); } /** Decreases indentation level, prints a closing brace in new line (using * {@link #beginNewLine()}) and calls {@link #endLine()}. * @param semicolon if <code>true</code> then prints a semicolon after the brace*/ void endBlockNewLine(boolean semicolon) { endBlock(semicolon, true); } /** Decreases indentation level and prints a closing brace in new line (using * {@link #beginNewLine()}). */ void endBlock() { endBlock(false, false); } /** Decreases indentation level and prints a closing brace in new line (using * {@link #beginNewLine()}). * @param semicolon if <code>true</code> then prints a semicolon after the brace * @param newline if <code>true</code> then additionally calls {@link #endLine()} */ void endBlock(boolean semicolon, boolean newline) { indentLevel--; beginNewLine(); out(semicolon ? 
"};" : "}"); if (newline) { endLine(); } } /** Prints source code location in the form "at [filename] ([location])" */ void location(Node node) { out(" at ", node.getUnit().getFilename(), " (", node.getLocation(), ")"); } @Override public void visit(CompilationUnit that) { root = that; Module clm = that.getUnit().getPackage().getModule() .getLanguageModule(); if (!JsCompiler.compilingLanguageModule) { require(clm); setCLAlias(names.moduleAlias(clm)); } for (CompilerAnnotation ca: that.getCompilerAnnotations()) { ca.visit(this); } if (that.getImportList() != null) { that.getImportList().visit(this); } visitStatements(that.getDeclarations()); } public void visit(Import that) { ImportableScope scope = that.getImportMemberOrTypeList().getImportList().getImportedScope(); if (scope instanceof Package) { require(((Package) scope).getModule()); } } private void require(Module mod) { final String path = scriptPath(mod); final String modAlias = names.moduleAlias(mod); if (importedModules.put(path, modAlias) == null) { out("var ", modAlias, "=require('", path, "');"); endLine(); } } private String scriptPath(Module mod) { StringBuilder path = new StringBuilder(mod.getNameAsString().replace('.', '/')).append('/'); if (!mod.isDefault()) { path.append(mod.getVersion()).append('/'); } path.append(mod.getNameAsString()); if (!mod.isDefault()) { path.append('-').append(mod.getVersion()); } return path.toString(); } @Override public void visit(Parameter that) { out(names.name(that.getDeclarationModel())); } @Override public void visit(ParameterList that) { out("("); boolean first=true; boolean ptypes = false; for (Parameter param: that.getParameters()) { if (!first) out(","); com.redhat.ceylon.compiler.typechecker.model.Parameter d = param.getDeclarationModel(); if (!ptypes && d.getScope() instanceof Method) { List<TypeParameter> tparms = ((Method)d.getScope()).getTypeParameters(); if (tparms != null && !tparms.isEmpty()) { for (TypeParameter tp : 
((Method)d.getScope()).getTypeParameters()) { ptypes |= TypeUtils.typeContainsTypeParameter(d.getType(), tp) != null; } } } out(names.name(d)); first = false; } if (ptypes) { if (!first) out(","); out("$$$mptypes"); } out(")"); } private void visitStatements(List<? extends Statement> statements) { List<String> oldRetainedVars = retainedVars.reset(null); for (int i=0; i<statements.size(); i++) { Statement s = statements.get(i); s.visit(this); beginNewLine(); retainedVars.emitRetainedVars(this); } retainedVars.reset(oldRetainedVars); } @Override public void visit(Body that) { visitStatements(that.getStatements()); } @Override public void visit(Block that) { List<Statement> stmnts = that.getStatements(); if (stmnts.isEmpty()) { out("{}"); } else { beginBlock(); initSelf(that); visitStatements(stmnts); endBlock(); } } private void initSelf(Block block) { initSelf(block.getScope()); } private void initSelf(Scope scope) { if ((prototypeOwner != null) && ((scope instanceof MethodOrValue) || (scope instanceof TypeDeclaration) || (scope instanceof Specification))) { out("var "); self(prototypeOwner); out("=this;"); endLine(); } } private void comment(Tree.Declaration that) { if (!comment) return; endLine(); out("//", that.getNodeType(), " ", that.getDeclarationModel().getName()); location(that); endLine(); } private void var(Declaration d) { out("var ", names.name(d), "="); } private boolean share(Declaration d) { return share(d, true); } private boolean share(Declaration d, boolean excludeProtoMembers) { boolean shared = false; if (!(excludeProtoMembers && prototypeStyle && d.isClassOrInterfaceMember()) && isCaptured(d)) { beginNewLine(); outerSelf(d); out(".", names.name(d), "=", names.name(d), ";"); endLine(); shared = true; } return shared; } @Override public void visit(ClassDeclaration that) { //Don't even bother with nodes that have errors if (that.getErrors() != null && !that.getErrors().isEmpty()) { //But warnings are ok for (Message err : that.getErrors()) { if 
(!(err instanceof AnalysisWarning)) { return; } } } Class d = that.getDeclarationModel(); if (prototypeStyle && d.isClassOrInterfaceMember()) return; comment(that); out(function, names.name(d), "("); //Generate each parameter because we need to append one at the end for (Parameter p: that.getParameterList().getParameters()) { p.visit(this); out(", "); } self(d); out(")"); ExtendedType ext = that.getExtendedType(); TypeDeclaration aliased = ext.getType().getDeclarationModel(); out("{return "); qualify(ext.getType(), aliased); out(names.name(aliased), "("); if (ext.getInvocationExpression().getPositionalArgumentList() != null) { ext.getInvocationExpression().getPositionalArgumentList().visit(this); if (!ext.getInvocationExpression().getPositionalArgumentList().getPositionalArguments().isEmpty()) { out(","); } } else { out("/*PENDIENTE*/"); } self(d); out(");}"); endLine(); out(names.name(d), ".$$="); qualify(ext, aliased); out(names.name(aliased), ".$$;"); endLine(); share(d); } private void addClassDeclarationToPrototype(TypeDeclaration outer, ClassDeclaration that) { comment(that); TypeDeclaration dec = that.getExtendedType().getType().getTypeModel().getDeclaration(); String path = qualifiedPath(that, dec, true); if (path.length() > 0) { path += '.'; } out(names.self(outer), ".", names.name(that.getDeclarationModel()), "=", path, names.name(dec), ";"); endLine(); } @Override public void visit(InterfaceDeclaration that) { //Don't even bother with nodes that have errors if (that.getErrors() != null && !that.getErrors().isEmpty()) return; Interface d = that.getDeclarationModel(); if (prototypeStyle && d.isClassOrInterfaceMember()) return; //It's pointless declaring interface aliases outside of classes/interfaces Scope scope = that.getScope(); if (scope instanceof InterfaceAlias) { scope = scope.getContainer(); if (!(scope instanceof ClassOrInterface)) return; } comment(that); var(d); TypeDeclaration dec = that.getTypeSpecifier().getType().getTypeModel() 
.getDeclaration(); qualify(that,dec); out(names.name(dec), ";"); endLine(); share(d); } private void addInterfaceDeclarationToPrototype(TypeDeclaration outer, InterfaceDeclaration that) { comment(that); TypeDeclaration dec = that.getTypeSpecifier().getType().getTypeModel().getDeclaration(); String path = qualifiedPath(that, dec, true); if (path.length() > 0) { path += '.'; } out(names.self(outer), ".", names.name(that.getDeclarationModel()), "=", path, names.name(dec), ";"); endLine(); } private void addInterfaceToPrototype(ClassOrInterface type, InterfaceDefinition interfaceDef) { interfaceDefinition(interfaceDef); Interface d = interfaceDef.getDeclarationModel(); out(names.self(type), ".", names.name(d), "=", names.name(d), ";"); endLine(); } @Override public void visit(InterfaceDefinition that) { //Don't even bother with nodes that have errors if (that.getErrors() != null && !that.getErrors().isEmpty()) return; if (!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) { interfaceDefinition(that); } } private void interfaceDefinition(InterfaceDefinition that) { Interface d = that.getDeclarationModel(); comment(that); out(function, names.name(d), "("); self(d); out(")"); beginBlock(); //declareSelf(d); referenceOuter(d); final List<Declaration> superDecs = new ArrayList<Declaration>(); if (!prototypeStyle) { new SuperVisitor(superDecs).visit(that.getInterfaceBody()); } callInterfaces(that.getSatisfiedTypes(), d, that, superDecs); that.getInterfaceBody().visit(this); //returnSelf(d); endBlockNewLine(); share(d); typeInitialization(that); } private void addClassToPrototype(ClassOrInterface type, ClassDefinition classDef) { classDefinition(classDef); Class d = classDef.getDeclarationModel(); out(names.self(type), ".", names.name(d), "=", names.name(d), ";"); endLine(); } @Override public void visit(ClassDefinition that) { //Don't even bother with nodes that have errors if (that.getErrors() != null && !that.getErrors().isEmpty()) return; if 
(!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) {
        classDefinition(that);
    }
}

// Emits the JS constructor function for a class definition: parameters, type
// arguments ($$targs$$), outer reference, super/interface calls and the body.
private void classDefinition(ClassDefinition that) {
    Class d = that.getDeclarationModel();
    comment(that);
    out(function, names.name(d), "(");
    for (Parameter p: that.getParameterList().getParameters()) {
        p.visit(this);
        out(", ");
    }
    boolean withTargs = that.getTypeParameterList() != null &&
        !that.getTypeParameterList().getTypeParameterDeclarations().isEmpty();
    if (withTargs) {
        out("$$targs$$,");
    }
    self(d);
    out(")");
    beginBlock();
    //This takes care of top-level attributes defined before the class definition
    out("$init$", names.name(d), "();");
    endLine();
    declareSelf(d);
    if (withTargs) {
        self(d);
        out(".$$targs$$=$$targs$$;");
        endLine();
    } else {
        //Check if any of the satisfied types have type arguments
        if (that.getSatisfiedTypes() != null) {
            for(Tree.StaticType sat : that.getSatisfiedTypes().getTypes()) {
                // NOTE(review): 'first' is re-initialized on every iteration, so the
                // 'else' branch below can never run; each satisfied type with type
                // arguments overwrites $$targs$$. Looks like 'first' was meant to be
                // declared before the loop - confirm intent before changing.
                boolean first = true;
                List<ProducedType> targs = sat.getTypeModel().getTypeArgumentList();
                if (targs != null && !targs.isEmpty()) {
                    if (first) {
                        self(d);
                        out(".$$targs$$=");
                        TypeUtils.printTypeArguments(that, targs, this);
                        endLine(true);
                    } else {
                        out("/*TODO: more type arguments*/");
                        endLine();
                    }
                }
            }
        }
    }
    referenceOuter(d);
    initParameters(that.getParameterList(), d);
    // Collect declarations referenced through 'super' (non-prototype style only).
    final List<Declaration> superDecs = new ArrayList<Declaration>();
    if (!prototypeStyle) {
        new SuperVisitor(superDecs).visit(that.getClassBody());
    }
    callSuperclass(that.getExtendedType(), d, that, superDecs);
    callInterfaces(that.getSatisfiedTypes(), d, that, superDecs);
    that.getClassBody().visit(this);
    returnSelf(d);
    endBlockNewLine();
    share(d);
    typeInitialization(that);
}

// In prototype style, stores a reference to the enclosing instance on this one.
private void referenceOuter(TypeDeclaration d) {
    if (prototypeStyle && d.isClassOrInterfaceMember()) {
        self(d);
        out(".");
        outerSelf(d);
        out("=this;");
        endLine();
    }
}

// Aliases inherited members that are referenced via 'super' under a scope-suffixed
// name, so the originals stay reachable after the subtype overrides them.
// Only needed in non-prototype style.
private void copySuperMembers(TypeDeclaration typeDecl, final List<Declaration> decs, ClassOrInterface d) {
    if (!prototypeStyle) {
        for (Declaration dec: decs) {
            if (!typeDecl.isMember(dec)) {
                continue;
            }
            String suffix = names.scopeSuffix(dec.getContainer());
            if (dec instanceof Value) {
                superGetterRef(dec,d,suffix);
                if (((Value) dec).isVariable()) {
                    superSetterRef(dec,d,suffix);
                }
            } else if (dec instanceof Getter) {
                superGetterRef(dec,d,suffix);
                if (((Getter) dec).isVariable()) {
                    superSetterRef(dec,d,suffix);
                }
            } else {
                superRef(dec,d,suffix);
            }
        }
    }
}

// Emits the call to the superclass constructor (with arguments, type arguments
// and the current instance), then copies super-referenced members.
private void callSuperclass(ExtendedType extendedType, Class d, Node that,
        final List<Declaration> superDecs) {
    if (extendedType!=null) {
        TypeDeclaration typeDecl = extendedType.getType().getDeclarationModel();
        List<PositionalArgument> argList = extendedType.getInvocationExpression()
                .getPositionalArgumentList().getPositionalArguments();
        qualify(that, typeDecl);
        out(memberAccessBase(extendedType.getType(), names.name(typeDecl), false),
            (prototypeStyle && (getSuperMemberScope(extendedType.getType()) != null))
                ? ".call(this," : "(");
        for (PositionalArgument arg: argList) {
            arg.visit(this);
            out(",");
        }
        //If the supertype has type arguments, add them to the call
        if (typeDecl.getTypeParameters() != null && !typeDecl.getTypeParameters().isEmpty()) {
            TypeUtils.printTypeArguments(that, extendedType.getType().getTypeArgumentList().getTypeModels(), this);
            out(",");
        }
        self(d);
        out(");");
        endLine();
        copySuperMembers(typeDecl, superDecs, d);
    }
}

// Emits a call to each satisfied interface's constructor function on this instance
// (resolving aliases first), then copies super-referenced members.
private void callInterfaces(SatisfiedTypes satisfiedTypes, ClassOrInterface d, Node that,
        final List<Declaration> superDecs) {
    if (satisfiedTypes!=null) {
        for (StaticType st: satisfiedTypes.getTypes()) {
            TypeDeclaration typeDecl = st.getTypeModel().getDeclaration();
            if (typeDecl.isAlias()) {
                typeDecl = typeDecl.getExtendedTypeDeclaration();
            }
            qualify(that, typeDecl);
            out(names.name((ClassOrInterface)typeDecl), "(");
            self(d);
            out(");");
            endLine();
            copySuperMembers(typeDecl, superDecs, d);
        }
    }
}

/** Generates a function to initialize the specified type.
*/
private void typeInitialization(final Tree.Declaration type) {
    // Extract the extended/satisfied types and model from whichever node kind we got.
    ExtendedType extendedType = null;
    SatisfiedTypes satisfiedTypes = null;
    boolean isInterface = false;
    ClassOrInterface decl = null;
    if (type instanceof ClassDefinition) {
        ClassDefinition classDef = (ClassDefinition) type;
        extendedType = classDef.getExtendedType();
        satisfiedTypes = classDef.getSatisfiedTypes();
        decl = classDef.getDeclarationModel();
    } else if (type instanceof InterfaceDefinition) {
        satisfiedTypes = ((InterfaceDefinition) type).getSatisfiedTypes();
        isInterface = true;
        decl = ((InterfaceDefinition) type).getDeclarationModel();
    } else if (type instanceof ObjectDefinition) {
        ObjectDefinition objectDef = (ObjectDefinition) type;
        extendedType = objectDef.getExtendedType();
        satisfiedTypes = objectDef.getSatisfiedTypes();
        decl = (ClassOrInterface)objectDef.getDeclarationModel().getTypeDeclaration();
    }
    // In prototype style the member definitions are added from inside the init
    // function, via this callback.
    final PrototypeInitCallback callback = new PrototypeInitCallback() {
        @Override
        public void addToPrototypeCallback() {
            if (type instanceof ClassDefinition) {
                addToPrototype(((ClassDefinition)type).getDeclarationModel(), ((ClassDefinition)type).getClassBody().getStatements());
            } else if (type instanceof InterfaceDefinition) {
                addToPrototype(((InterfaceDefinition)type).getDeclarationModel(), ((InterfaceDefinition)type).getInterfaceBody().getStatements());
            }
        }
    };
    typeInitialization(extendedType, satisfiedTypes, isInterface, decl, callback);
}

/** This is now the main method to generate the type initialization code.
 * @param extendedType The type that is being extended.
 * @param satisfiedTypes The types satisfied by the type being initialized.
 * @param isInterface Tells whether the type being initialized is an interface
 * @param d The declaration for the type being initialized
 * @param callback A callback to add something more to the type initializer in prototype style.
 */
private void typeInitialization(ExtendedType extendedType, SatisfiedTypes satisfiedTypes, boolean isInterface,
        ClassOrInterface d, PrototypeInitCallback callback) {
    //Let's always use initTypeProto to avoid #113
    String initFuncName = "initTypeProto";
    // Emit: function $init$T(){ if (T.$$===undefined){ <clAlias>initTypeProto(T,'qname',super,ifaces...); } return T; }
    out("function $init$", names.name(d), "()");
    beginBlock();
    out("if (", names.name(d), ".$$===undefined)");
    beginBlock();
    String qns = d.getQualifiedNameString();
    if (JsCompiler.compilingLanguageModule && qns.indexOf("::") < 0) {
        //Language module files get compiled in default module
        //so they need to have this added to their qualified name
        qns = "ceylon.language::" + qns;
    }
    out(clAlias, initFuncName, "(", names.name(d), ",'", qns, "'");
    if (extendedType != null) {
        out(",", typeFunctionName(extendedType.getType(), false));
    } else if (!isInterface) {
        // Classes without an explicit supertype extend Basic.
        out(",", clAlias, "Basic");
    }
    if (satisfiedTypes != null) {
        for (StaticType satType : satisfiedTypes.getTypes()) {
            TypeDeclaration tdec = satType.getTypeModel().getDeclaration();
            if (tdec.isAlias()) {
                tdec = tdec.getExtendedTypeDeclaration();
            }
            String fname = typeFunctionName(satType, true);
            //Actually it could be "if not in same module"
            if (!JsCompiler.compilingLanguageModule && declaredInCL(tdec)) {
                out(",", fname);
            } else {
                // Reference the type through its $init$ function so forward
                // declarations resolve; rewrite "a.b.T" into "a.b.$init$T".
                int idx = fname.lastIndexOf('.');
                if (idx > 0) {
                    fname = fname.substring(0, idx+1) + "$init$" + fname.substring(idx+1);
                } else {
                    fname = "$init$" + fname;
                }
                out(",", fname, "()");
            }
        }
    }
    out(");");
    //The class definition needs to be inside the init function if we want forwards decls to work in prototype style
    if (prototypeStyle) {
        endLine();
        callback.addToPrototypeCallback();
    }
    endBlockNewLine();
    out("return ", names.name(d), ";");
    endBlockNewLine();
    //If it's nested, share the init function
    if (outerSelf(d)) {
        out(".$init$", names.name(d), "=$init$", names.name(d), ";");
        endLine();
    }
    out("$init$", names.name(d), "();");
    endLine();
}

// Returns the JS expression that references the constructor function of the
// given type (optionally resolving aliases), qualified as needed.
private String typeFunctionName(StaticType type, boolean removeAlias) {
    TypeDeclaration d =
type.getTypeModel().getDeclaration();
    if (removeAlias && d.isAlias()) {
        d = d.getExtendedTypeDeclaration();
    }
    boolean inProto = prototypeStyle && (type.getScope().getContainer() instanceof TypeDeclaration);
    String constr = qualifiedPath(type, d, inProto);
    if (constr.length() > 0) {
        constr += '.';
    }
    constr += memberAccessBase(type, names.name(d), false);
    return constr;
}

// In prototype style, wraps the given member statements in an IIFE that receives
// the type's prototype object and adds each member to it.
private void addToPrototype(ClassOrInterface d, List<Statement> statements) {
    if (prototypeStyle && !statements.isEmpty()) {
        out("(function(", names.self(d), ")");
        beginBlock();
        for (Statement s: statements) {
            addToPrototype(d, s);
        }
        endBlock();
        out(")(", names.name(d), ".$$.prototype);");
        endLine();
    }
}

// The type whose prototype is currently being populated (saved/restored below).
private ClassOrInterface prototypeOwner;

// Dispatches a single member statement to the appropriate add*ToPrototype helper,
// tracking the current prototype owner around the recursion.
private void addToPrototype(ClassOrInterface d, Statement s) {
    ClassOrInterface oldPrototypeOwner = prototypeOwner;
    prototypeOwner = d;
    if (s instanceof MethodDefinition) {
        addMethodToPrototype(d, (MethodDefinition)s);
    } else if (s instanceof MethodDeclaration) {
        methodDeclaration(d, (MethodDeclaration) s);
    } else if (s instanceof AttributeGetterDefinition) {
        addGetterToPrototype(d, (AttributeGetterDefinition)s);
    } else if (s instanceof AttributeSetterDefinition) {
        addSetterToPrototype(d, (AttributeSetterDefinition)s);
    } else if (s instanceof AttributeDeclaration) {
        addGetterAndSetterToPrototype(d, (AttributeDeclaration) s);
    } else if (s instanceof ClassDefinition) {
        addClassToPrototype(d, (ClassDefinition) s);
    } else if (s instanceof InterfaceDefinition) {
        addInterfaceToPrototype(d, (InterfaceDefinition) s);
    } else if (s instanceof ObjectDefinition) {
        addObjectToPrototype(d, (ObjectDefinition) s);
    } else if (s instanceof ClassDeclaration) {
        addClassDeclarationToPrototype(d, (ClassDeclaration) s);
    } else if (s instanceof InterfaceDeclaration) {
        addInterfaceDeclarationToPrototype(d, (InterfaceDeclaration) s);
    } else if (s instanceof SpecifierStatement) {
        addSpecifierToPrototype(d, (SpecifierStatement) s);
    }
    prototypeOwner = oldPrototypeOwner;
}

// Emits "if (self===undefined) self=new T.$$;" so the self instance is created
// only when the caller did not pass one in (e.g. a subtype constructor).
private void declareSelf(ClassOrInterface d) {
    out("if (");
    self(d);
    out("===undefined)");
    self(d);
    out("=new ");
    if (prototypeStyle && d.isClassOrInterfaceMember()) {
        out("this.", names.name(d), ".$$;");
    } else {
        out(names.name(d), ".$$;");
    }
    endLine();
    /*out("var ");
    self(d);
    out("=");
    self();
    out(";");
    endLine();*/
}

// Emits "var self=new T.$$;" (unconditional creation, used by object definitions).
private void instantiateSelf(ClassOrInterface d) {
    out("var ");
    self(d);
    out("=new ");
    if (prototypeStyle && d.isClassOrInterfaceMember()) {
        out("this.", names.name(d), ".$$;");
    } else {
        out(names.name(d), ".$$;");
    }
    endLine();
}

// Emits "return self;" for the given type's constructor function.
private void returnSelf(ClassOrInterface d) {
    out("return ");
    self(d);
    out(";");
}

// Generates an object definition's anonymous class and attaches it to the outer
// type's prototype.
private void addObjectToPrototype(ClassOrInterface type, ObjectDefinition objDef) {
    objectDefinition(objDef);
    Value d = objDef.getDeclarationModel();
    Class c = (Class) d.getTypeDeclaration();
    out(names.self(type), ".", names.name(c), "=", names.name(c), ";");
    endLine();
}

@Override
public void visit(ObjectDefinition that) {
    //Don't even bother with nodes that have errors
    if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
    Value d = that.getDeclarationModel();
    if (!(prototypeStyle && d.isClassOrInterfaceMember())) {
        objectDefinition(that);
    } else {
        // Member object in prototype style: just assign the singleton value by
        // invoking the (already prototyped) anonymous class constructor.
        Class c = (Class) d.getTypeDeclaration();
        comment(that);
        outerSelf(d);
        out(".", names.name(d), "=");
        outerSelf(d);
        out(".", names.name(c), "();");
        endLine();
    }
}

// Emits the anonymous class for an object definition plus the singleton value
// and its getter. Continues past this block with the epilogue.
private void objectDefinition(ObjectDefinition that) {
    Value d = that.getDeclarationModel();
    boolean addToPrototype = prototypeStyle && d.isClassOrInterfaceMember();
    Class c = (Class) d.getTypeDeclaration();
    comment(that);
    out(function, names.name(c), "()");
    beginBlock();
    instantiateSelf(c);
    referenceOuter(c);
    final List<Declaration> superDecs = new ArrayList<Declaration>();
    if (!prototypeStyle) {
        new SuperVisitor(superDecs).visit(that.getClassBody());
    }
    callSuperclass(that.getExtendedType(), c, that, superDecs);
    callInterfaces(that.getSatisfiedTypes(), c, that, superDecs);
    that.getClassBody().visit(this);
    returnSelf(c);
    indentLevel--;
endLine(); out("}"); endLine(); typeInitialization(that); addToPrototype(c, that.getClassBody().getStatements()); if (!addToPrototype) { out("var ", names.name(d), "=", names.name(c), "(new ", names.name(c), ".$$);"); endLine(); } out("var ", names.getter(d), "=function()"); beginBlock(); out("return "); if (addToPrototype) { out("this."); } out(names.name(d), ";"); endBlockNewLine(); if (addToPrototype || d.isShared()) { outerSelf(d); out(".", names.getter(d), "=", names.getter(d), ";"); endLine(); } } private void superRef(Declaration d, ClassOrInterface sub, String parentSuffix) { //if (d.isActual()) { self(sub); out(".", names.name(d), parentSuffix, "="); self(sub); out(".", names.name(d), ";"); endLine(); //} } private void superGetterRef(Declaration d, ClassOrInterface sub, String parentSuffix) { //if (d.isActual()) { self(sub); out(".", names.getter(d), parentSuffix, "="); self(sub); out(".", names.getter(d), ";"); endLine(); //} } private void superSetterRef(Declaration d, ClassOrInterface sub, String parentSuffix) { //if (d.isActual()) { self(sub); out(".", names.setter(d), parentSuffix, "="); self(sub); out(".", names.setter(d), ";"); endLine(); //} } @Override public void visit(MethodDeclaration that) { //Don't even bother with nodes that have errors if (that.getErrors() != null && !that.getErrors().isEmpty()) return; methodDeclaration(null, that); } private void methodDeclaration(TypeDeclaration outer, MethodDeclaration that) { Method m = that.getDeclarationModel(); if (that.getSpecifierExpression() != null) { // method(params) => expr if (outer == null) { // Not in a prototype definition. Null to do here if it's a // member in prototype style. 
if (prototypeStyle && m.isMember()) { return; } comment(that); out("var "); } else { // prototype definition comment(that); out(names.self(outer), "."); } out(names.name(m), "="); singleExprFunction(that.getParameterLists(), that.getSpecifierExpression().getExpression(), that.getScope()); endLine(true); share(m); } else if (outer == null) { // don't do the following in a prototype definition //Check for refinement of simple param declaration if (m == that.getScope()) { if (m.getContainer() instanceof Class && m.isClassOrInterfaceMember()) { //Declare the method just by pointing to the param function final String name = names.name(((Class)m.getContainer()).getParameter(m.getName())); if (name != null) { self((Class)m.getContainer()); out(".", names.name(m), "=", name, ";"); endLine(); } } else if (m.getContainer() instanceof Method) { //Declare the function just by forcing the name we used in the param list final String name = names.name(((Method)m.getContainer()).getParameter(m.getName())); if (names != null) { names.forceName(m, name); } } } } } @Override public void visit(MethodDefinition that) { //Don't even bother with nodes that have errors if (that.getErrors() != null && !that.getErrors().isEmpty()) return; if (!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) { comment(that); methodDefinition(that); } } private void methodDefinition(MethodDefinition that) { Method d = that.getDeclarationModel(); if (that.getParameterLists().size() == 1) { out(function, names.name(d)); ParameterList paramList = that.getParameterLists().get(0); paramList.visit(this); beginBlock(); initSelf(that.getBlock()); initParameters(paramList, null); visitStatements(that.getBlock().getStatements()); endBlock(); } else { int count=0; for (ParameterList paramList : that.getParameterLists()) { if (count==0) { out(function, names.name(d)); } else { out("return function"); } paramList.visit(this); beginBlock(); initSelf(that.getBlock()); initParameters(paramList, 
null);
            count++;
        }
        visitStatements(that.getBlock().getStatements());
        // Close one block per parameter list (the nested curried functions).
        for (int i=0; i < count; i++) {
            endBlock();
        }
    }
    if (!share(d)) {
        out(";");
    }
}

// Emits defaulting code for parameters (default arguments / empty sequences)
// and, when typeDecl is given, stores captured parameters on the instance.
private void initParameters(ParameterList params, TypeDeclaration typeDecl) {
    for (final Parameter param : params.getParameters()) {
        com.redhat.ceylon.compiler.typechecker.model.Parameter pd = param.getDeclarationModel();
        /*if (param instanceof ValueParameterDeclaration && ((ValueParameterDeclaration)param).getDeclarationModel().isHidden()) {
            //TODO support new syntax for class and method parameters
            //the declaration is actually different from the one we usually use
            out("//HIDDEN! ", pd.getName(), "(", names.name(pd), ")");
            endLine();
        }*/
        String paramName = names.name(pd);
        if (param.getDefaultArgument() != null || pd.isSequenced()) {
            // if(p===undefined){p=<default or empty>;}
            out("if(", paramName, "===undefined){", paramName, "=");
            if (param.getDefaultArgument() == null) {
                out(clAlias, "empty");
            } else {
                final SpecifierExpression defaultExpr = param.getDefaultArgument().getSpecifierExpression();
                if ((param instanceof FunctionalParameterDeclaration) && (defaultExpr instanceof LazySpecifierExpression)) {
                    // function parameter defaulted using "=>"
                    singleExprFunction(
                            ((FunctionalParameterDeclaration) param).getParameterLists(),
                            defaultExpr.getExpression(), null);
                } else {
                    defaultExpr.visit(this);
                }
            }
            out(";}");
            endLine();
        }
        if ((typeDecl != null) && pd.isCaptured()) {
            self(typeDecl);
            out(".", paramName, "=", paramName, ";");
            endLine();
        }
    }
}

// Emits a method definition directly onto the outer type's prototype.
private void addMethodToPrototype(TypeDeclaration outer,
        MethodDefinition that) {
    Method d = that.getDeclarationModel();
    if (!prototypeStyle||!d.isClassOrInterfaceMember()) return;
    comment(that);
    out(names.self(outer), ".", names.name(d), "=");
    methodDefinition(that);
}

@Override
public void visit(AttributeGetterDefinition that) {
    Getter d = that.getDeclarationModel();
    if (prototypeStyle&&d.isClassOrInterfaceMember()) return;
    comment(that);
    out("var ", names.getter(d), "=function()");
    super.visit(that);
    if (!shareGetter(d)) {
        out(";");
    }
}

// Emits a getter definition onto the outer type's prototype.
private void addGetterToPrototype(TypeDeclaration outer,
        AttributeGetterDefinition that) {
    Getter d = that.getDeclarationModel();
    if (!prototypeStyle||!d.isClassOrInterfaceMember()) return;
    comment(that);
    out(names.self(outer), ".", names.getter(d), "=",
            function, names.getter(d), "()");
    super.visit(that);
    out(";");
}

/** Exports a getter function; useful in non-prototype style. */
private boolean shareGetter(MethodOrValue d) {
    boolean shared = false;
    if (isCaptured(d)) {
        beginNewLine();
        outerSelf(d);
        out(".", names.getter(d), "=", names.getter(d), ";");
        endLine();
        shared = true;
    }
    return shared;
}

@Override
public void visit(AttributeSetterDefinition that) {
    Setter d = that.getDeclarationModel();
    if (prototypeStyle&&d.isClassOrInterfaceMember()) return;
    comment(that);
    // Setter is named after its getter and takes the setter's parameter.
    out("var ", names.setter(d.getGetter()), "=function(", names.name(d.getParameter()), ")");
    super.visit(that);
    if (!shareSetter(d)) {
        out(";");
    }
}

// Emits a setter definition onto the outer type's prototype.
private void addSetterToPrototype(TypeDeclaration outer,
        AttributeSetterDefinition that) {
    Setter d = that.getDeclarationModel();
    if (!prototypeStyle || !d.isClassOrInterfaceMember()) return;
    comment(that);
    String setterName = names.setter(d.getGetter());
    out(names.self(outer), ".", setterName, "=",
            function, setterName, "(", names.name(d.getParameter()), ")");
    super.visit(that);
    out(";");
}

// True when a declaration must be reachable from outside its defining scope
// (shared, captured, or referenced from an outer scope per OuterVisitor).
private boolean isCaptured(Declaration d) {
    if (d.isToplevel()||d.isClassOrInterfaceMember()) { //TODO: what about things nested inside control structures
        if (d.isShared() || d.isCaptured() ) {
            return true;
        } else {
            OuterVisitor ov = new OuterVisitor(d);
            ov.visit(root);
            return ov.found;
        }
    } else {
        return false;
    }
}

// Exports a setter function; useful in non-prototype style.
private boolean shareSetter(MethodOrValue d) {
    boolean shared = false;
    if (isCaptured(d)) {
        beginNewLine();
        outerSelf(d);
        out(".", names.setter(d), "=", names.setter(d), ";");
        endLine();
        shared = true;
    }
    return shared;
}

@Override
public void visit(AttributeDeclaration that) {
    Value d = that.getDeclarationModel();
    //Check if the attribute
//corresponds to a class parameter
    //This is because of the new initializer syntax
    String classParam = null;
    if (d.getContainer() instanceof Functional) {
        classParam = names.name(((Functional)d.getContainer()).getParameter(d.getName()));
    }
    if (!d.isFormal()) {
        comment(that);
        SpecifierOrInitializerExpression specInitExpr =
                that.getSpecifierOrInitializerExpression();
        if (prototypeStyle && d.isClassOrInterfaceMember()) {
            // Member attribute in prototype style: store the initial value on the
            // instance; the getter/setter pair goes on the prototype elsewhere.
            if ((specInitExpr != null)
                    && !(specInitExpr instanceof LazySpecifierExpression)) {
                outerSelf(d);
                out(".", names.name(d), "=");
                super.visit(that);
                endLine(true);
            } else if (classParam != null) {
                outerSelf(d);
                out(".", names.name(d), "=", classParam);
                endLine(true);
            }
        } else if (specInitExpr instanceof LazySpecifierExpression) {
            // "=>" attribute: the getter re-evaluates the expression on each call.
            out("var ", names.getter(d), "=function(){return ");
            int boxType = boxStart(specInitExpr.getExpression().getTerm());
            specInitExpr.getExpression().visit(this);
            boxUnboxEnd(boxType);
            out(";}");
            endLine(true);
            shareGetter(d);
        } else {
            if ((specInitExpr != null) || (classParam != null) || !d.isMember() || d.isVariable()) {
                generateAttributeGetter(d, specInitExpr, classParam);
            }
            if (d.isVariable()) {
                final String varName = names.name(d);
                String paramVarName = names.createTempVariable(d.getName());
                out("var ", names.setter(d), "=function(", paramVarName, "){return ");
                out(varName, "=", paramVarName, ";};");
                endLine();
                shareSetter(d);
            }
        }
    }
}

// Declares the backing variable for an attribute (initialized from its specifier
// or the corresponding class parameter) plus the getter when the value is
// captured; otherwise the attribute is marked for direct access.
private void generateAttributeGetter(MethodOrValue decl,
            SpecifierOrInitializerExpression expr, String param) {
    final String varName = names.name(decl);
    out("var ", varName);
    if (expr != null) {
        out("=");
        int boxType = boxStart(expr.getExpression().getTerm());
        expr.visit(this);
        boxUnboxEnd(boxType);
    } else if (param != null) {
        out("=", param);
    }
    endLine(true);
    if (decl instanceof Method) {
        if (decl.isClassOrInterfaceMember() && isCaptured(decl)) {
            beginNewLine();
            outerSelf(decl);
            out(".", names.name(decl), "=", names.name(decl), ";");
            endLine();
        }
    } else {
        if (isCaptured(decl)) {
            out("var ", names.getter(decl),"=function(){return ", varName, ";};");
            endLine();
        } else {
            directAccess.add(decl);
        }
        shareGetter(decl);
    }
}

// Emits the getter (and setter, for variable attributes) of an attribute
// declaration onto the outer type's prototype.
private void addGetterAndSetterToPrototype(TypeDeclaration outer,
        AttributeDeclaration that) {
    Value d = that.getDeclarationModel();
    if (!prototypeStyle||d.isToplevel()) return;
    if (!d.isFormal()) {
        comment(that);
        String classParam = null;
        if (d.getContainer() instanceof Functional) {
            classParam = names.name(((Functional)d.getContainer()).getParameter(d.getName()));
        }
        if ((that.getSpecifierOrInitializerExpression() != null) || d.isVariable() || (classParam != null)) {
            out(names.self(outer), ".", names.getter(d), "=",
                    function, names.getter(d), "()");
            beginBlock();
            if (that.getSpecifierOrInitializerExpression()
                            instanceof LazySpecifierExpression) {
                // attribute is defined by a lazy expression ("=>" syntax)
                initSelf(that.getScope());
                out("return ");
                Expression expr = that.getSpecifierOrInitializerExpression().getExpression();
                int boxType = boxStart(expr.getTerm());
                expr.visit(this);
                boxUnboxEnd(boxType);
                out(";");
            }
            else {
                out("return this.", names.name(d), ";");
            }
            endBlockNewLine(true);
        }
        if (d.isVariable()) {
            String paramVarName = names.createTempVariable(d.getName());
            out(names.self(outer), ".", names.setter(d), "=");
            out(function, names.setter(d), "(", paramVarName, ")");
            beginBlock();
            out("return this.", names.name(d), "=", paramVarName, ";");
            endBlockNewLine(true);
        }
    }
}

@Override
public void visit(CharLiteral that) {
    out(clAlias, "Character(");
    // codePointAt(1) skips the opening quote of the literal text.
    out(String.valueOf(that.getText().codePointAt(1)));
    out(")");
}

/** Escapes a StringLiteral (needs to be quoted).
*/
String escapeStringLiteral(String s) {
    StringBuilder text = new StringBuilder(s);
    //Escape special chars
    // Iterates between the surrounding quotes (indices 1..length-2); after each
    // replacement i is bumped past the inserted backslash.
    for (int i=1; i < text.length()-1;i++) {
        switch(text.charAt(i)) {
        case 8:text.replace(i, i+1, "\\b"); i++; break;
        case 9:text.replace(i, i+1, "\\t"); i++; break;
        case 10:text.replace(i, i+1, "\\n"); i++; break;
        case 12:text.replace(i, i+1, "\\f"); i++; break;
        case 13:text.replace(i, i+1, "\\r"); i++; break;
        case 34:text.replace(i, i+1, "\\\""); i++; break;
        case 39:text.replace(i, i+1, "\\'"); i++; break;
        case 92:text.replace(i, i+1, "\\\\"); i++; break;
        }
    }
    return text.toString();
}

@Override
public void visit(StringLiteral that) {
    // Length in code points, excluding the surrounding quotes.
    final int slen = that.getText().codePointCount(1, that.getText().length()-1);
    if (JsCompiler.compilingLanguageModule) {
        out("String$(", escapeStringLiteral(that.getText()), ",", Integer.toString(slen), ")");
    } else {
        out(clAlias, "String(", escapeStringLiteral(that.getText()), ",", Integer.toString(slen), ")");
    }
}

@Override
public void visit(StringTemplate that) {
    // Interleaves literal parts and expression parts into a StringBuilder.appendAll([...]).
    List<StringLiteral> literals = that.getStringLiterals();
    List<Expression> exprs = that.getExpressions();
    out(clAlias, "StringBuilder().appendAll([");
    boolean first = true;
    for (int i = 0; i < literals.size(); i++) {
        StringLiteral literal = literals.get(i);
        // Skip empty literal parts (just the two quotes).
        if (literal.getText().length() > 2) {
            if (!first) { out(","); }
            first = false;
            literal.visit(this);
        }
        if (i < exprs.size()) {
            if (!first) { out(","); }
            first = false;
            exprs.get(i).visit(this);
            out(".getString()");
        }
    }
    out("]).getString()");
}

@Override
public void visit(FloatLiteral that) {
    out(clAlias, "Float(", that.getText(), ")");
}

@Override
public void visit(NaturalLiteral that) {
    // '$' prefix = binary literal, '#' prefix = hexadecimal literal.
    char prefix = that.getText().charAt(0);
    if (prefix == '$' || prefix == '#') {
        int radix= prefix == '$' ? 2 : 16;
        try {
            out("(", new java.math.BigInteger(that.getText().substring(1), radix).toString(), ")");
        } catch (NumberFormatException ex) {
            that.addError("Invalid numeric literal " + that.getText());
        }
    } else {
        out("(", that.getText(), ")");
    }
}

@Override
public void visit(This that) {
    self(Util.getContainingClassOrInterface(that.getScope()));
}

@Override
public void visit(Super that) {
    self(Util.getContainingClassOrInterface(that.getScope()));
}

@Override
public void visit(Outer that) {
    if (prototypeStyle) {
        // Walk out to the nearest containing type and qualify through its self.
        Scope scope = that.getScope();
        while ((scope != null) && !(scope instanceof TypeDeclaration)) {
            scope = scope.getContainer();
        }
        if (scope != null) {
            self((TypeDeclaration) scope);
            out(".");
        }
    }
    self(that.getTypeModel().getDeclaration());
}

@Override
public void visit(BaseMemberExpression that) {
    if (that.getErrors() != null && !that.getErrors().isEmpty()) {
        //Don't even bother processing a node with errors
        return;
    }
    Declaration decl = that.getDeclaration();
    String name = decl.getName();
    String pkgName = decl.getUnit().getPackage().getQualifiedNameString();
    // map Ceylon true/false/null directly to JS true/false/null
    if ("ceylon.language".equals(pkgName)) {
        if ("true".equals(name) || "false".equals(name) || "null".equals(name)) {
            out(name);
            return;
        }
    }
    out(memberAccess(that));
}

// True when the declaration can be read without going through a getter.
private boolean accessDirectly(Declaration d) {
    return !accessThroughGetter(d) || directAccess.contains(d);
}

// Values (but not methods) are normally read through generated getters.
private boolean accessThroughGetter(Declaration d) {
    return (d instanceof MethodOrValue) && !(d instanceof Method);
}

/** Returns true if the top-level declaration for the term is annotated "nativejs" */
private static boolean isNative(Term t) {
    if (t instanceof MemberOrTypeExpression) {
        return isNative(((MemberOrTypeExpression)t).getDeclaration());
    }
    return false;
}

/** Returns true if the declaration is annotated "nativejs" */
private static boolean isNative(Declaration d) {
    return hasAnnotationByName(getToplevel(d), "nativejs");
}

private static Declaration
getToplevel(Declaration d) {
    // Walks containers until reaching the top-level declaration (or null).
    while (d != null && !d.isToplevel()) {
        Scope s = d.getContainer();
        // Skip any non-declaration elements
        while (s != null && !(s instanceof Declaration)) {
            s = s.getContainer();
        }
        d = (Declaration) s;
    }
    return d;
}

// True when the declaration carries an annotation with the given name.
private static boolean hasAnnotationByName(Declaration d, String name){
    if (d != null) {
        for(com.redhat.ceylon.compiler.typechecker.model.Annotation annotation : d.getAnnotations()){
            if(annotation.getName().equals(name))
                return true;
        }
    }
    return false;
}

// Generates "?." access: evaluate the primary once into a temp var and access
// the member only when it is not null (wrapping methods in JsCallable).
private void generateSafeOp(QualifiedMemberOrTypeExpression that) {
    boolean isMethod = that.getDeclaration() instanceof Method;
    String lhsVar = createRetainedTempVar("opt");
    out("(", lhsVar, "=");
    super.visit(that);
    out(",");
    if (isMethod) {
        out(clAlias, "JsCallable(", lhsVar, ",");
    }
    out(lhsVar, "!==null?", lhsVar, ".", memberAccess(that), ":null)");
    if (isMethod) { out(")"); }
}

@Override
public void visit(QualifiedMemberExpression that) {
    //Big TODO: make sure the member is actually
    // refined by the current class!
    if (that.getMemberOperator() instanceof SafeMemberOp) {
        generateSafeOp(that);
    } else if (that.getMemberOperator() instanceof SpreadOp) {
        generateSpread(that);
    } else if (that.getDeclaration() instanceof Method && that.getSignature() == null) {
        //TODO right now this causes that all method invocations are done this way
        //we need to filter somehow to only use this pattern when the result is supposed to be a callable
        //looks like checking for signature is a good way (not THE way though; named arg calls don't have signature)
        generateCallable(that, null);
    } else {
        super.visit(that);
        out(".", memberAccess(that));
    }
}

/** SpreadOp cannot be a simple function call because we need to reference the object methods directly, so it's a function */
private void generateSpread(QualifiedMemberOrTypeExpression that) {
    //Determine if it's a method or attribute
    boolean isMethod = that.getDeclaration() instanceof Method;
    //Define a function
    out("(function()");
    beginBlock();
    if (comment) {
        out("//SpreadOp at ", that.getLocation());
        endLine();
    }
    //Declare an array to store the values/references
    String tmplist = names.createTempVariable("lst");
    out("var ", tmplist, "=[];");
    endLine();
    //Get an iterator
    String iter = names.createTempVariable("it");
    out("var ", iter, "=");
    super.visit(that);
    out(".getIterator();");
    endLine();
    //Iterate
    String elem = names.createTempVariable("elem");
    out("var ", elem, ";");
    endLine();
    out("while ((", elem, "=", iter, ".next())!==", clAlias, "getFinished())");
    beginBlock();
    //Add value or reference to the array
    out(tmplist, ".push(");
    if (isMethod) {
        // Keep both the receiver and the bound function for later invocation.
        out("{o:", elem, ", f:", elem, ".", memberAccess(that), "}");
    } else {
        out(elem, ".", memberAccess(that));
    }
    out(");");
    endBlockNewLine();
    //Gather arguments to pass to the callable
    //Return the array of values or a Callable with the arguments
    out("return ", clAlias);
    if (isMethod) {
        out("JsCallableList(", tmplist, ");");
    } else {
        out("ArraySequence(", tmplist, ");");
    }
    endBlock();
    out("())");
}

private void
generateCallable(QualifiedMemberOrTypeExpression that, String name) {
    // Evaluate the primary once and wrap the member reference in JsCallable so
    // it can be invoked later with the right receiver (null-safe).
    String primaryVar = createRetainedTempVar("opt");
    out("(", primaryVar, "=");
    that.getPrimary().visit(this);
    out(",", clAlias, "JsCallable(", primaryVar, ",", primaryVar, "!==null?",
            primaryVar, ".", (name == null) ? memberAccess(that) : name, ":null))");
}

/**
 * Checks if the given node is a MemberOrTypeExpression or QualifiedType which
 * represents an access to a supertype member and returns the scope of that
 * member or null.
 */
private Scope getSuperMemberScope(Node node) {
    Scope scope = null;
    if (node instanceof BaseMemberOrTypeExpression) {
        // Check for "Supertype::member"
        BaseMemberOrTypeExpression bmte = (BaseMemberOrTypeExpression) node;
        if (bmte.getSupertypeQualifier() != null) {
            scope = bmte.getDeclaration().getContainer();
        }
    }
    else if (node instanceof QualifiedMemberOrTypeExpression) {
        // Check for "super.member"
        QualifiedMemberOrTypeExpression qmte = (QualifiedMemberOrTypeExpression) node;
        if (qmte.getPrimary() instanceof Super) {
            scope = qmte.getDeclaration().getContainer();
        }
    }
    else if (node instanceof QualifiedType) {
        // Check for super.Membertype
        QualifiedType qtype = (QualifiedType) node;
        if (qtype.getOuterType() instanceof SuperType) {
            scope = qtype.getDeclarationModel().getContainer();
        }
    }
    return scope;
}

// Builds the textual member reference: optional qualification for base
// expressions, supertype-member routing (via prototype or scope suffix), and a
// trailing "$" for names reserved by the runtime.
private String memberAccessBase(Node node, String member, boolean qualifyBaseExpr) {
    StringBuilder sb = new StringBuilder();
    if (qualifyBaseExpr && (node instanceof BaseMemberOrTypeExpression)) {
        BaseMemberOrTypeExpression bmte = (BaseMemberOrTypeExpression) node;
        String path = qualifiedPath(node, bmte.getDeclaration());
        if (path.length() > 0) {
            sb.append(path);
            sb.append(".");
        }
    }
    Scope scope = getSuperMemberScope(node);
    if (prototypeStyle && (scope != null)) {
        // Supertype member in prototype style: go through the supertype's
        // prototype object looked up in the global type table.
        sb.append("getT$all()['");
        sb.append(scope.getQualifiedNameString());
        sb.append("'].$$.prototype.");
    }
    sb.append(member);
    if (!prototypeStyle && (scope != null)) {
        sb.append(names.scopeSuffix(scope));
    }
    //When compiling the language module we need to modify certain base type names
    String rval = sb.toString();
    if (TypeUtils.isReservedTypename(rval)) {
        rval = sb.append("$").toString();
    }
    return rval;
}

/**
 * Returns a string representing a read access to a member, as represented by
 * the given expression. If the expression is a QualifiedMemberOrTypeExpression
 * then the LHS is *not* included. If it is a BaseMemberOrTypeExpression and
 * qualifyBaseExpr==true then the qualified path is included.
 */
private String memberAccess(MemberOrTypeExpression expr, boolean qualifyBaseExpr) {
    Declaration decl = expr.getDeclaration();
    if (isNative(decl)) {
        // direct access to a native element
        return decl.getName();
    }
    if (accessDirectly(decl)) {
        // direct access, without getter
        return memberAccessBase(expr, names.name(decl), qualifyBaseExpr);
    }
    // access through getter
    boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null);
    return memberAccessBase(expr, names.getter(decl), qualifyBaseExpr)
            + (protoCall ? ".call(this)" : "()");
}

private String memberAccess(MemberOrTypeExpression expr) {
    return memberAccess(expr, true);
}

// Callback used by generateMemberAccess to emit the assigned value in place.
private static interface MemberAccessCallback {
    public void generateValue();
}

/**
 * Generates a write access to a member, as represented by the given expression.
 * The given callback is responsible for generating the assigned value.
 * If the expression is a QualifiedMemberOrTypeExpression then the
 * LHS is *not* included. If it is a BaseMemberOrTypeExpression and
 * qualifyBaseExpr==true then the qualified path is included.
*/ private void generateMemberAccess(MemberOrTypeExpression expr, MemberAccessCallback callback, boolean qualifyBaseExpr) { Declaration decl = expr.getDeclaration(); boolean paren = false; if (isNative(decl)) { // direct access to a native element out(decl.getName(), "="); } else if (accessDirectly(decl)) { // direct access, without setter out(memberAccessBase(expr, names.name(decl), qualifyBaseExpr), "="); } else { // access through setter boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null); out(memberAccessBase(expr, names.setter(decl), qualifyBaseExpr), protoCall ? ".call(this," : "("); paren = true; } callback.generateValue(); if (paren) { out(")"); } } private void generateMemberAccess(MemberOrTypeExpression expr, final String strValue, boolean qualifyBaseExpr) { generateMemberAccess(expr, new MemberAccessCallback() { @Override public void generateValue() { out(strValue); } }, qualifyBaseExpr); } @Override public void visit(BaseTypeExpression that) { if (that.getErrors() != null && !that.getErrors().isEmpty()) return; qualify(that, that.getDeclaration()); out(names.name(that.getDeclaration())); } @Override public void visit(QualifiedTypeExpression that) { if (that.getMemberOperator() instanceof SafeMemberOp) { generateCallable(that, names.name(that.getDeclaration())); } else { super.visit(that); out(".", names.name(that.getDeclaration())); } } @Override public void visit(InvocationExpression that) { if (that.getNamedArgumentList()!=null) { NamedArgumentList argList = that.getNamedArgumentList(); out("("); Map<String, String> argVarNames = invoker.defineNamedArguments(argList); TypeArguments targs = that.getPrimary() instanceof BaseTypeExpression ? 
((BaseTypeExpression)that.getPrimary()).getTypeArguments() : null; that.getPrimary().visit(this); if (that.getPrimary() instanceof Tree.MemberOrTypeExpression) { Tree.MemberOrTypeExpression mte = (Tree.MemberOrTypeExpression) that.getPrimary(); if (mte.getDeclaration() instanceof Functional) { Functional f = (Functional) mte.getDeclaration(); invoker.applyNamedArguments(argList, f, argVarNames, getSuperMemberScope(mte)!=null, targs); } } out(")"); } else { PositionalArgumentList argList = that.getPositionalArgumentList(); that.getPrimary().visit(this); if (prototypeStyle && (getSuperMemberScope(that.getPrimary()) != null)) { out(".call(this"); if (!argList.getPositionalArguments().isEmpty()) { out(","); } } else { out("("); } argList.visit(this); TypeArguments targs = that.getPrimary() instanceof StaticMemberOrTypeExpression ? ((StaticMemberOrTypeExpression)that.getPrimary()).getTypeArguments() : null; if (targs != null && targs.getTypeModels() != null && !targs.getTypeModels().isEmpty()) { if (argList.getPositionalArguments().size() > 0) { out(","); } Declaration bmed = ((StaticMemberOrTypeExpression)that.getPrimary()).getDeclaration(); if (bmed instanceof Functional) { if (((Functional) bmed).getParameterLists().get(0).getParameters().size() > argList.getPositionalArguments().size() // has no comprehension && (argList.getPositionalArguments().isEmpty() || argList.getPositionalArguments().get(argList.getPositionalArguments().size()-1) instanceof Tree.Comprehension == false)) { out("undefined,"); } } if (targs != null && targs.getTypeModels() != null && !targs.getTypeModels().isEmpty()) { TypeUtils.printTypeArguments(that, targs.getTypeModels(), this); } } out(")"); } } @Override public void visit(PositionalArgumentList that) { invoker.generatePositionalArguments(that, that.getPositionalArguments(), false); } // Make sure fromTerm is compatible with toTerm by boxing it when necessary private int boxStart(Term fromTerm) { boolean fromNative = isNative(fromTerm); 
boolean toNative = false;
    ProducedType fromType = fromTerm.getTypeModel();
    return boxUnboxStart(fromNative, fromType, toNative);
}

// Make sure fromTerm is compatible with toTerm by boxing or unboxing it when necessary
int boxUnboxStart(Term fromTerm, Term toTerm) {
    boolean fromNative = isNative(fromTerm);
    boolean toNative = isNative(toTerm);
    ProducedType fromType = fromTerm.getTypeModel();
    return boxUnboxStart(fromNative, fromType, toNative);
}

// Make sure fromTerm is compatible with toDecl by boxing or unboxing it when necessary
int boxUnboxStart(Term fromTerm, com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration toDecl) {
    boolean fromNative = isNative(fromTerm);
    boolean toNative = isNative(toDecl);
    ProducedType fromType = fromTerm.getTypeModel();
    return boxUnboxStart(fromNative, fromType, toNative);
}

/**
 * Opens a boxing/unboxing conversion when the native-ness of source and target
 * differ, and returns a code telling boxUnboxEnd how to close it:
 * 0 = nothing emitted/needed; 1 = a box constructor call was opened (close with ")");
 * 2 = a Ceylon String/Float must be unwrapped (close with ".valueOf()");
 * 3 = mismatch of some other type (boxUnboxEnd emits nothing for it).
 */
int boxUnboxStart(boolean fromNative, ProducedType fromType, boolean toNative) {
    if (fromNative != toNative) {
        // Box the value
        String fromTypeName = fromType.getProducedTypeQualifiedName();
        if (fromNative) {
            // conversion from native value to Ceylon value
            if (fromTypeName.equals("ceylon.language::String")) {
                out(clAlias, "String(");
            } else if (fromTypeName.equals("ceylon.language::Integer")) {
                out("(");
            } else if (fromTypeName.equals("ceylon.language::Float")) {
                out(clAlias, "Float(");
            } else if (fromTypeName.equals("ceylon.language::Boolean")) {
                out("(");
            } else if (fromTypeName.equals("ceylon.language::Character")) {
                out(clAlias, "Character(");
            } else {
                return 0;
            }
            return 1;
        } else if ("ceylon.language::String".equals(fromTypeName)
                || "ceylon.language::Float".equals(fromTypeName)) {
            // conversion from Ceylon String or Float to native value
            return 2;
        } else {
            return 3;
        }
    }
    return 0;
}

/** Closes a conversion opened by boxUnboxStart (see its return codes). */
void boxUnboxEnd(int boxType) {
    switch (boxType) {
    case 1: out(")"); break;
    case 2: out(".valueOf()"); break;
    default: //nothing
    }
}

/**
 * Emits an anonymous-object named argument as an immediately-invoked function
 * that defines the object's class and returns a fresh instance of it.
 */
@Override
public void visit(ObjectArgument that) {
    //Don't even bother with nodes that have errors
    if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
    final Class c = (Class)that.getDeclarationModel().getTypeDeclaration();
    out("(function()");
    beginBlock();
    out("//ObjectArgument ", that.getIdentifier().getText());
    location(that);
    endLine();
    out(function, names.name(c), "()");
    beginBlock();
    instantiateSelf(c);
    referenceOuter(c);
    ExtendedType xt = that.getExtendedType();
    final ClassBody body = that.getClassBody();
    SatisfiedTypes sts = that.getSatisfiedTypes();
    final List<Declaration> superDecs = new ArrayList<Declaration>();
    if (!prototypeStyle) {
        new SuperVisitor(superDecs).visit(that.getClassBody());
    }
    callSuperclass(xt, c, that, superDecs);
    callInterfaces(sts, c, that, superDecs);
    body.visit(this);
    returnSelf(c);
    indentLevel--;
    endLine();
    out("}");
    endLine();
    typeInitialization(xt, sts, false, c, new PrototypeInitCallback() {
        @Override
        public void addToPrototypeCallback() {
            addToPrototype(c, body.getStatements());
        }
    });
    out("return ", names.name(c), "(new ", names.name(c), ".$$);");
    endBlock();
    out("())");
}

/**
 * Emits a getter-style named argument as an immediately-invoked function
 * returning either the specifier expression or the value computed by the block.
 */
@Override
public void visit(AttributeArgument that) {
    out("(function()");
    beginBlock();
    out("//AttributeArgument ", that.getParameter().getName());
    location(that);
    endLine();
    Block block = that.getBlock();
    SpecifierExpression specExpr = that.getSpecifierExpression();
    if (specExpr != null) {
        out("return ");
        specExpr.getExpression().visit(this);
        out(";");
    }
    else if (block != null) {
        visitStatements(block.getStatements());
    }
    endBlock();
    out("())");
}

@Override
public void visit(SequencedArgument that) {
    List<PositionalArgument> positionalArguments = that.getPositionalArguments();
    // "spread": last argument is not a plain listed argument, so no literal
    // array brackets are emitted around the arguments
    boolean spread = !positionalArguments.isEmpty()
            && positionalArguments.get(positionalArguments.size()-1)
                instanceof Tree.ListedArgument == false;
    if (!spread) { out("["); }
    boolean first=true;
    for (PositionalArgument arg: positionalArguments) {
        if (!first) out(",");
        if (arg instanceof Tree.ListedArgument) {
            ((Tree.ListedArgument) arg).getExpression().visit(this);
        } else if(arg instanceof Tree.SpreadArgument)
            ((Tree.SpreadArgument) arg).getExpression().visit(this);
        else // comprehension
            arg.visit(this);
        first = false;
    }
    if (!spread) { out("]"); }
}

/**
 * Emits a sequence enumeration {@code { x, y, ... }}; a trailing spread is
 * appended by chaining it onto the sequence of the preceding elements.
 */
@Override
public void visit(SequenceEnumeration that) {
    SequencedArgument sarg = that.getSequencedArgument();
    if (sarg == null) {
        out(clAlias, "empty");
    } else {
        List<PositionalArgument> positionalArguments = sarg.getPositionalArguments();
        int lim = positionalArguments.size()-1;
        boolean spread = !positionalArguments.isEmpty()
                && positionalArguments.get(positionalArguments.size()-1)
                    instanceof Tree.ListedArgument == false;
        int count=0;
        ProducedType chainedType = null;
        if (lim>0 || !spread) { out("["); }
        for (PositionalArgument expr : positionalArguments) {
            if (count==lim && spread) {
                if (lim > 0) {
                    // close the listed part, then chain the spread part onto it
                    ProducedType seqType = TypeUtils.findSupertype(types.iterable, that.getTypeModel());
                    closeSequenceWithReifiedType(that, seqType.getTypeArgumentList());
                    out(".chain(");
                    chainedType = TypeUtils.findSupertype(types.iterable, expr.getTypeModel());
                }
                count--;
            } else {
                if (count > 0) { out(","); }
            }
            expr.visit(this);
            count++;
        }
        if (chainedType == null) {
            if (!spread) {
                closeSequenceWithReifiedType(that, that.getTypeModel().getTypeArgumentList());
            }
        } else {
            out(",");
            TypeUtils.printTypeArguments(that, chainedType.getTypeArgumentList(), this);
            out(")");
        }
    }
}

@Override
public void visit(Comprehension that) {
    new ComprehensionGenerator(this, names, directAccess).generateComprehension(that);
}

@Override
public void visit(final SpecifierStatement that) {
    if (prototypeStyle && (that.getSpecifierExpression() instanceof LazySpecifierExpression)
            && (that.getScope().getContainer() instanceof TypeDeclaration)) {
        // A lazy specifier expression in a class/interface. In prototype style
        // these should go into the prototype, so don't generate them here.
return;
    }
    if (that.getBaseMemberExpression() instanceof BaseMemberExpression) {
        BaseMemberExpression bme = (BaseMemberExpression) that.getBaseMemberExpression();
        Declaration bmeDecl = bme.getDeclaration();
        if (that.getSpecifierExpression() instanceof LazySpecifierExpression) {
            // attr => expr;  (lazy: emit a getter function)
            if (bmeDecl.isMember()) {
                qualify(that, bmeDecl);
            } else {
                out("var ");
            }
            out(names.getter(bmeDecl), "=function(){return ");
            that.getSpecifierExpression().visit(this);
            out(";};");
            // now reached through the getter, not directly
            directAccess.remove(bmeDecl);
        } else {
            // attr = expr;
            if (bmeDecl instanceof MethodOrValue) {
                final MethodOrValue moval = (MethodOrValue)bmeDecl;
                if (moval.isVariable()) {
                    // simple assignment to a variable attribute
                    generateMemberAccess(bme, new MemberAccessCallback() {
                        @Override public void generateValue() {
                            int boxType = boxUnboxStart(
                                    that.getSpecifierExpression().getExpression().getTerm(), moval);
                            that.getSpecifierExpression().getExpression().visit(GenerateJsVisitor.this);
                            boxUnboxEnd(boxType);
                        }
                    }, true);
                    out(";");
                } else if (moval.isMember()) {
                    // Specifier for a member attribute. This actually defines the
                    // member (e.g. in shortcut refinement syntax the attribute
                    // declaration itself can be omitted), so generate the attribute.
                    generateAttributeGetter(moval, that.getSpecifierExpression(), null);
                } else {
                    // Specifier for some other attribute, or for a method.
                    if (prototypeStyle
                            || (bmeDecl.isMember() && (bmeDecl instanceof Method))) {
                        qualify(that, bmeDecl);
                    }
                    out(names.name(bmeDecl), "=");
                    that.getSpecifierExpression().visit(this);
                    out(";");
                }
            }
        }
    }
    else if ((that.getBaseMemberExpression() instanceof ParameterizedExpression)
            && (that.getSpecifierExpression() != null)) {
        ParameterizedExpression paramExpr =
                (ParameterizedExpression) that.getBaseMemberExpression();
        if (paramExpr.getPrimary() instanceof BaseMemberExpression) {
            // func(params) => expr;
            BaseMemberExpression bme = (BaseMemberExpression) paramExpr.getPrimary();
            Declaration bmeDecl = bme.getDeclaration();
            if (bmeDecl.isMember()) {
                qualify(that, bmeDecl);
            } else {
                out("var ");
            }
            out(names.name(bmeDecl), "=");
            singleExprFunction(paramExpr.getParameterLists(),
                    that.getSpecifierExpression().getExpression(), that.getScope());
            out(";");
        }
    }
}

/**
 * Emits the prototype-style counterpart of a specifier statement: the member
 * (getter or shortcut-refined function) is attached to the outer type's
 * "self" object instead of being declared locally.
 */
private void addSpecifierToPrototype(final TypeDeclaration outer,
            final SpecifierStatement specStmt) {
    if (specStmt.getBaseMemberExpression() instanceof BaseMemberExpression) {
        BaseMemberExpression bme = (BaseMemberExpression) specStmt.getBaseMemberExpression();
        Declaration bmeDecl = bme.getDeclaration();
        if (specStmt.getSpecifierExpression() instanceof LazySpecifierExpression) {
            // attr => expr;
            out(names.self(outer), ".", names.getter(bmeDecl), "=function()");
            beginBlock();
            initSelf(specStmt.getScope());
            out("return ");
            specStmt.getSpecifierExpression().visit(this);
            out(";");
            endBlockNewLine(true);
        }
        else if (bmeDecl.isMember() && (bmeDecl instanceof Value)) {
            // attr = expr;
            out(names.self(outer), ".", names.getter(bmeDecl),
                    "=function(){return this.", names.name(bmeDecl), ";};");
            endLine();
        }
    }
    else if ((specStmt.getBaseMemberExpression() instanceof ParameterizedExpression)
            && (specStmt.getSpecifierExpression() != null)) {
        final ParameterizedExpression paramExpr =
                (ParameterizedExpression) specStmt.getBaseMemberExpression();
        if (paramExpr.getPrimary() instanceof BaseMemberExpression) {
            // func(params) => expr;
            BaseMemberExpression bme = (BaseMemberExpression) paramExpr.getPrimary();
            out(names.self(outer), ".", names.name(bme.getDeclaration()), "=");
            singleExprFunction(paramExpr.getParameterLists(),
                    specStmt.getSpecifierExpression().getExpression(), specStmt.getScope());
            out(";");
        }
    }
}

/**
 * Emits an assignment expression. The whole thing is wrapped in a JS
 * comma-expression so that (when the member does not have a trivial
 * getter/setter pair) the assignment still yields the assigned value.
 */
@Override
public void visit(final AssignOp that) {
    String returnValue = null;
    MemberOrTypeExpression lhsExpr = null;
    out("(");
    if (that.getLeftTerm() instanceof BaseMemberExpression) {
        BaseMemberExpression bme = (BaseMemberExpression) that.getLeftTerm();
        lhsExpr = bme;
        Declaration bmeDecl = bme.getDeclaration();
        boolean simpleSetter = hasSimpleGetterSetter(bmeDecl);
        if (!simpleSetter) {
            returnValue = memberAccess(bme);
        }
    } else if (that.getLeftTerm() instanceof QualifiedMemberExpression) {
        QualifiedMemberExpression qme = (QualifiedMemberExpression)that.getLeftTerm();
        lhsExpr = qme;
        boolean simpleSetter = hasSimpleGetterSetter(qme.getDeclaration());
        String lhsVar = null;
        if (!simpleSetter) {
            // evaluate the primary only once, into a retained temp var
            lhsVar = createRetainedTempVar();
            out(lhsVar, "=");
            super.visit(qme);
            out(",", lhsVar, ".");
            returnValue = lhsVar + "." + memberAccess(qme);
        } else {
            super.visit(qme);
            out(".");
        }
    }
    generateMemberAccess(lhsExpr, new MemberAccessCallback() {
        @Override public void generateValue() {
            int boxType = boxUnboxStart(that.getRightTerm(), that.getLeftTerm());
            that.getRightTerm().visit(GenerateJsVisitor.this);
            boxUnboxEnd(boxType);
        }
    }, true);
    if (returnValue != null) { out(",", returnValue); }
    out(")");
}

/** Outputs the module name for the specified declaration. Returns true if something was output.
 */
boolean qualify(Node that, Declaration d) {
    if (d.getUnit().getPackage().getModule().isDefault()) {
        return false;
    }
    String path = qualifiedPath(that, d);
    if (path.length() > 0) {
        out(path, ".");
    }
    return path.length() > 0;
}

private String qualifiedPath(Node that, Declaration d) {
    return qualifiedPath(that, d, false);
}

/**
 * Computes the JS path (module alias, or chain of "self" names of enclosing
 * types) through which the given declaration must be referenced from the
 * scope of the given node; returns "" when no qualification is needed.
 */
private String qualifiedPath(Node that, Declaration d, boolean inProto) {
    boolean isMember = d.isClassOrInterfaceMember();
    if (!isMember && isImported(that, d)) {
        return names.moduleAlias(d.getUnit().getPackage().getModule());
    }
    else if (prototypeStyle && !inProto) {
        if (isMember && !(d instanceof com.redhat.ceylon.compiler.typechecker.model.Parameter
                && !d.isCaptured())) {
            TypeDeclaration id = that.getScope().getInheritingDeclaration(d);
            if (id == null) {
                //a local declaration of some kind,
                //perhaps in an outer scope
                id = (TypeDeclaration) d.getContainer();
            } //else {
                //an inherited declaration that might be
                //inherited by an outer scope
            //}
            String path = "";
            Scope scope = that.getScope();
//            if (inProto) {
//                while ((scope != null) && (scope instanceof TypeDeclaration)) {
//                    scope = scope.getContainer();
//                }
//            }
            if ((scope != null) && ((that instanceof ClassDeclaration)
                    || (that instanceof InterfaceDeclaration))) {
                // class/interface aliases have no own "this"
                scope = scope.getContainer();
            }
            // Walk outwards, collecting the "self" names of enclosing types
            // until the declaring/inheriting type is reached
            while (scope != null) {
                if (scope instanceof TypeDeclaration) {
                    if (path.length() > 0) {
                        path += '.';
                    }
                    path += names.self((TypeDeclaration) scope);
                } else {
                    path = "";
                }
                if (scope == id) {
                    break;
                }
                scope = scope.getContainer();
            }
            return path;
        }
    }
    // NOTE(review): d was already dereferenced unconditionally above, so the
    // d != null test here can never be the deciding factor — confirm and simplify.
    else if (d != null && (d.isShared() || inProto) && isMember) {
        TypeDeclaration id = that.getScope().getInheritingDeclaration(d);
        if (id==null) {
            //a shared local declaration
            return names.self((TypeDeclaration)d.getContainer());
        }
        else {
            //an inherited declaration that might be
            //inherited by an outer scope
            return names.self(id);
        }
    }
    return "";
}

/** Tells whether a declaration is in the same package as a node.
 */
private boolean isImported(Node that, Declaration d) {
    if (d == null) {
        return false;
    }
    Package p1 = d.getUnit().getPackage();
    Package p2 = that == null ? null : that.getUnit().getPackage();
    return !p1.equals(p2);
}

@Override
public void visit(ExecutableStatement that) {
    super.visit(that);
    endLine(true);
}

/** Creates a new temporary variable which can be used immediately, even
 * inside an expression. The declaration for that temporary variable will be
 * emitted after the current Ceylon statement has been completely processed.
 * The resulting code is valid because JavaScript variables may be used before
 * they are declared. */
private String createRetainedTempVar(String baseName) {
    String varName = names.createTempVariable(baseName);
    retainedVars.add(varName);
    return varName;
}

private String createRetainedTempVar() {
    return createRetainedTempVar("tmp");
}

//    @Override
//    public void visit(Expression that) {
//        if (that.getTerm() instanceof QualifiedMemberOrTypeExpression) {
//            QualifiedMemberOrTypeExpression term = (QualifiedMemberOrTypeExpression) that.getTerm();
//            // References to methods of types from other packages always need
//            // special treatment, even if prototypeStyle==false, because they
//            // may have been generated in prototype style. In particular,
//            // ceylon.language is always in prototype style.
//            if ((term.getDeclaration() instanceof Functional)
//                    && (prototypeStyle || !declaredInThisPackage(term.getDeclaration()))) {
//                if (term.getMemberOperator() instanceof SpreadOp) {
//                    generateSpread(term);
//                } else {
//                    generateCallable(term, names.name(term.getDeclaration()));
//                }
//                return;
//            }
//        }
//        super.visit(that);
//    }

@Override
public void visit(Return that) {
    out("return ");
    super.visit(that);
}

@Override public void visit(AnnotationList that) {}

/** Outputs the "self" name of the given type declaration. */
void self(TypeDeclaration d) {
    out(names.self(d));
}

/*
 * Output the name of a variable that receives the type parameter info, usually in the class constructor.
 * /
private void selfTypeParameters(TypeDeclaration d) {
    out(selfTypeParametersString(d));
}
private String selfTypeParametersString(TypeDeclaration d) {
    return "$$typeParms" + d.getName();
}*/
/*private void self() {
    out("$$");
}*/

/** Outputs the receiver of a member of d's container; returns whether anything was output. */
private boolean outerSelf(Declaration d) {
    if (d.isToplevel()) {
        out("exports");
        return true;
    }
    else if (d.isClassOrInterfaceMember()) {
        self((TypeDeclaration)d.getContainer());
        return true;
    }
    return false;
}

/** Whether the declaration belongs to the ceylon.language module. */
private boolean declaredInCL(Declaration decl) {
    return decl.getUnit().getPackage().getQualifiedNameString()
            .startsWith("ceylon.language");
}

// Arithmetic operators map to method calls: a+b → a.plus(b), etc.
@Override public void visit(SumOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left();
            out(".plus(");
            termgen.right();
            out(")");
        }
    });
}

@Override public void visit(DifferenceOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left();
            out(".minus(");
            termgen.right();
            out(")");
        }
    });
}

@Override public void visit(ProductOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left();
            out(".times(");
            termgen.right();
            out(")");
        }
    });
}

@Override public void visit(QuotientOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left();
            out(".divided(");
            termgen.right();
            out(")");
        }
    });
}

@Override public void visit(RemainderOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left();
            out(".remainder(");
            termgen.right();
            out(")");
        }
    });
}

@Override public void visit(PowerOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left();
            out(".power(");
            termgen.right();
            out(")");
        }
    });
}

@Override public void visit(AddAssignOp that) {
    arithmeticAssignOp(that, "plus");
}

@Override public 
void visit(SubtractAssignOp that) {
    arithmeticAssignOp(that, "minus");
}

@Override public void visit(MultiplyAssignOp that) {
    arithmeticAssignOp(that, "times");
}

@Override public void visit(DivideAssignOp that) {
    arithmeticAssignOp(that, "divided");
}

@Override public void visit(RemainderAssignOp that) {
    arithmeticAssignOp(that, "remainder");
}

/**
 * Emits a compound assignment (+=, -=, ...) as a comma-expression that reads
 * the LHS, applies the named arithmetic method with the RHS, stores the result
 * back, and (for non-trivial accessors) yields the new value.
 */
private void arithmeticAssignOp(final ArithmeticAssignmentOp that,
                                final String functionName) {
    Term lhs = that.getLeftTerm();
    if (lhs instanceof BaseMemberExpression) {
        BaseMemberExpression lhsBME = (BaseMemberExpression) lhs;
        Declaration lhsDecl = lhsBME.getDeclaration();

        final String getLHS = memberAccess(lhsBME);
        out("(");
        generateMemberAccess(lhsBME, new MemberAccessCallback() {
            @Override public void generateValue() {
                out(getLHS, ".", functionName, "(");
                that.getRightTerm().visit(GenerateJsVisitor.this);
                out(")");
            }
        }, true);
        if (!hasSimpleGetterSetter(lhsDecl)) {
            out(",", getLHS);
        }
        out(")");

    } else if (lhs instanceof QualifiedMemberExpression) {
        QualifiedMemberExpression lhsQME = (QualifiedMemberExpression) lhs;
        if (isNative(lhsQME)) {
            // ($1.foo = Box($1.foo).operator($2))
            out("(");
            lhsQME.getPrimary().visit(this);
            out(".", lhsQME.getDeclaration().getName());
            out("=");
            int boxType = boxStart(lhsQME);
            lhsQME.getPrimary().visit(this);
            out(".", lhsQME.getDeclaration().getName());
            boxUnboxEnd(boxType);
            out(".", functionName, "(");
            that.getRightTerm().visit(this);
            out("))");
        } else {
            // evaluate the primary once, into a retained temp var
            final String lhsPrimaryVar = createRetainedTempVar();
            final String getLHS = lhsPrimaryVar + "." + memberAccess(lhsQME);
            out("(", lhsPrimaryVar, "=");
            lhsQME.getPrimary().visit(this);
            out(",", lhsPrimaryVar, ".");
            generateMemberAccess(lhsQME, new MemberAccessCallback() {
                @Override public void generateValue() {
                    out(getLHS, ".", functionName, "(");
                    that.getRightTerm().visit(GenerateJsVisitor.this);
                    out(")");
                }
            }, false);
            if (!hasSimpleGetterSetter(lhsQME.getDeclaration())) {
                out(",", getLHS);
            }
            out(")");
        }
    }
}

@Override public void visit(final NegativeOp that) {
    unaryOp(that, new UnaryOpGenerator() {
        @Override
        public void generate(UnaryOpTermGenerator termgen) {
            TypeDeclaration d = that.getTerm().getTypeModel().getDeclaration();
            if (d.inherits(types._integer)) {
                // native JS negation for Integers
                out("(-");
                termgen.term();
                out(")");
            //This is not really optimal yet, since it generates
            //stuff like Float(-Float((5.1)))
            /*} else if (d.inherits(types._float)) {
                out(clAlias, "Float(-");
                termgen.term();
                out(")");*/
            } else {
                termgen.term();
                out(".getNegativeValue()");
            }
        }
    });
}

@Override public void visit(final PositiveOp that) {
    unaryOp(that, new UnaryOpGenerator() {
        @Override
        public void generate(UnaryOpTermGenerator termgen) {
            TypeDeclaration d = that.getTerm().getTypeModel().getDeclaration();
            if (d.inherits(types._integer) || d.inherits(types._float)) {
                out("(+");
                termgen.term();
                out(")");
            } else {
                termgen.term();
                out(".getPositiveValue()");
            }
        }
    });
}

@Override public void visit(EqualOp that) {
    leftEqualsRight(that);
}

@Override public void visit(NotEqualOp that) {
    out("(!");
    leftEqualsRight(that);
    out(")");
}

@Override public void visit(NotOp that) {
    unaryOp(that, new UnaryOpGenerator() {
        @Override
        public void generate(UnaryOpTermGenerator termgen) {
            out("(!");
            termgen.term();
            out(")");
        }
    });
}

@Override public void visit(IdenticalOp that) {
    // identity (===), not equality
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            out("(");
            termgen.left();
            out("===");
            termgen.right();
            out(")");
        }
    });
}

@Override public void visit(CompareOp that) {
    leftCompareRight(that);
}

@Override 
public void visit(SmallerOp that) { leftCompareRight(that); out(".equals(", clAlias, "getSmaller())"); } @Override public void visit(LargerOp that) { leftCompareRight(that); out(".equals(", clAlias, "getLarger())"); } @Override public void visit(SmallAsOp that) { out("("); leftCompareRight(that); out("!==", clAlias, "getLarger()"); out(")"); } @Override public void visit(LargeAsOp that) { out("("); leftCompareRight(that); out("!==", clAlias, "getSmaller()"); out(")"); } /** Outputs the CL equivalent of 'a==b' in JS. */ private void leftEqualsRight(BinaryOperatorExpression that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".equals("); termgen.right(); out(")"); } }); } interface UnaryOpTermGenerator { void term(); } interface UnaryOpGenerator { void generate(UnaryOpTermGenerator termgen); } private void unaryOp(final UnaryOperatorExpression that, final UnaryOpGenerator gen) { final GenerateJsVisitor visitor = this; gen.generate(new UnaryOpTermGenerator() { @Override public void term() { int boxTypeLeft = boxStart(that.getTerm()); that.getTerm().visit(visitor); boxUnboxEnd(boxTypeLeft); } }); } interface BinaryOpTermGenerator { void left(); void right(); } interface BinaryOpGenerator { void generate(BinaryOpTermGenerator termgen); } private void binaryOp(final BinaryOperatorExpression that, final BinaryOpGenerator gen) { final GenerateJsVisitor visitor = this; gen.generate(new BinaryOpTermGenerator() { @Override public void left() { int boxTypeLeft = boxStart(that.getLeftTerm()); that.getLeftTerm().visit(visitor); boxUnboxEnd(boxTypeLeft); } @Override public void right() { int boxTypeRight = boxStart(that.getRightTerm()); that.getRightTerm().visit(visitor); boxUnboxEnd(boxTypeRight); } }); } /** Outputs the CL equivalent of 'a <=> b' in JS. 
*/ private void leftCompareRight(BinaryOperatorExpression that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".compare("); termgen.right(); out(")"); } }); } @Override public void visit(AndOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out("("); termgen.left(); out("&&"); termgen.right(); out(")"); } }); } @Override public void visit(OrOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out("("); termgen.left(); out("||"); termgen.right(); out(")"); } }); } @Override public void visit(final EntryOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out(clAlias, "Entry("); termgen.left(); out(","); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(Element that) { out(".item("); that.getExpression().visit(this); out(")"); } @Override public void visit(DefaultOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { String lhsVar = createRetainedTempVar("opt"); out("(", lhsVar, "="); termgen.left(); out(",", lhsVar, "!==null?", lhsVar, ":"); termgen.right(); out(")"); } }); } @Override public void visit(ThenOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out("("); termgen.left(); out("?"); termgen.right(); out(":null)"); } }); } @Override public void visit(IncrementOp that) { prefixIncrementOrDecrement(that.getTerm(), "getSuccessor"); } @Override public void visit(DecrementOp that) { prefixIncrementOrDecrement(that.getTerm(), "getPredecessor"); } private boolean hasSimpleGetterSetter(Declaration decl) { return !((decl instanceof Getter) || (decl 
instanceof Setter) || decl.isFormal()); } private void prefixIncrementOrDecrement(Term term, String functionName) { if (term instanceof BaseMemberExpression) { BaseMemberExpression bme = (BaseMemberExpression) term; boolean simpleSetter = hasSimpleGetterSetter(bme.getDeclaration()); String getMember = memberAccess(bme); String applyFunc = String.format("%s.%s()", getMember, functionName); out("("); generateMemberAccess(bme, applyFunc, true); if (!simpleSetter) { out(",", getMember); } out(")"); } else if (term instanceof QualifiedMemberExpression) { QualifiedMemberExpression qme = (QualifiedMemberExpression) term; String primaryVar = createRetainedTempVar(); String getMember = primaryVar + "." + memberAccess(qme); String applyFunc = String.format("%s.%s()", getMember, functionName); out("(", primaryVar, "="); qme.getPrimary().visit(this); out(",", primaryVar, "."); generateMemberAccess(qme, applyFunc, false); if (!hasSimpleGetterSetter(qme.getDeclaration())) { out(",", getMember); } out(")"); } } @Override public void visit(PostfixIncrementOp that) { postfixIncrementOrDecrement(that.getTerm(), "getSuccessor"); } @Override public void visit(PostfixDecrementOp that) { postfixIncrementOrDecrement(that.getTerm(), "getPredecessor"); } private void postfixIncrementOrDecrement(Term term, String functionName) { if (term instanceof BaseMemberExpression) { BaseMemberExpression bme = (BaseMemberExpression) term; String oldValueVar = createRetainedTempVar("old" + bme.getDeclaration().getName()); String applyFunc = String.format("%s.%s()", oldValueVar, functionName); out("(", oldValueVar, "=", memberAccess(bme), ","); generateMemberAccess(bme, applyFunc, true); out(",", oldValueVar, ")"); } else if (term instanceof QualifiedMemberExpression) { QualifiedMemberExpression qme = (QualifiedMemberExpression) term; String primaryVar = createRetainedTempVar(); String oldValueVar = createRetainedTempVar("old" + qme.getDeclaration().getName()); String applyFunc = String.format("%s.%s()", 
oldValueVar, functionName); out("(", primaryVar, "="); qme.getPrimary().visit(this); out(",", oldValueVar, "=", primaryVar, ".", memberAccess(qme), ",", primaryVar, "."); generateMemberAccess(qme, applyFunc, false); out(",", oldValueVar, ")"); } } @Override public void visit(final UnionOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".union("); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(final IntersectionOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".intersection("); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(final XorOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".exclusiveUnion("); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(final ComplementOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".complement("); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(Exists that) { unaryOp(that, new UnaryOpGenerator() { @Override public void generate(UnaryOpTermGenerator termgen) { out(clAlias, "exists("); termgen.term(); out(")"); } }); } @Override public void visit(Nonempty that) { unaryOp(that, new UnaryOpGenerator() { @Override public void 
generate(UnaryOpTermGenerator termgen) { out(clAlias, "nonempty("); termgen.term(); out(")"); } }); } //Don't know if we'll ever see this... @Override public void visit(ConditionList that) { System.out.println("ZOMG condition list in the wild! " + that.getLocation() + " of " + that.getUnit().getFilename()); super.visit(that); } @Override public void visit(BooleanCondition that) { int boxType = boxStart(that.getExpression().getTerm()); super.visit(that); boxUnboxEnd(boxType); } @Override public void visit(IfStatement that) { conds.generateIf(that); } @Override public void visit(WhileStatement that) { conds.generateWhile(that); } /** Generates js code to check if a term is of a certain type. We solve this in JS by * checking against all types that Type satisfies (in the case of union types, matching any * type will do, and in case of intersection types, all types must be matched). * @param term The term that is to be checked against a type * @param termString (optional) a string to be used as the term to be checked * @param type The type to check against * @param tmpvar (optional) a variable to which the term is assigned * @param negate If true, negates the generated condition */ void generateIsOfType(Term term, String termString, Type type, String tmpvar, final boolean negate) { if (negate) { out("!"); } out(clAlias, "isOfType("); if (term != null) { conds.specialConditionRHS(term, tmpvar); } else { conds.specialConditionRHS(termString, tmpvar); } out(","); TypeUtils.typeNameOrList(term, type.getTypeModel(), this, true); out(")"); } @Override public void visit(IsOp that) { generateIsOfType(that.getTerm(), null, that.getType(), null, false); } @Override public void visit(Break that) { if (continues.isEmpty()) { out("break;"); } else { Continuation top=continues.peek(); if (that.getScope()==top.getScope()) { top.useBreak(); out(top.getBreakName(), "=true; return;"); } else { out("break;"); } } } @Override public void visit(Continue that) { if (continues.isEmpty()) { 
out("continue;"); } else { Continuation top=continues.peek(); if (that.getScope()==top.getScope()) { top.useContinue(); out(top.getContinueName(), "=true; return;"); } else { out("continue;"); } } } @Override public void visit(final RangeOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out(clAlias, "Range("); termgen.left(); out(","); termgen.right(); out(","); TypeUtils.printTypeArguments(that, Collections.singletonList(that.getLeftTerm().getTypeModel()), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(ForStatement that) { if (comment) { out("//'for' statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")"); if (that.getExits()) out("//EXITS!"); endLine(); } ForIterator foriter = that.getForClause().getForIterator(); final String itemVar = generateForLoop(foriter); boolean hasElse = that.getElseClause() != null && !that.getElseClause().getBlock().getStatements().isEmpty(); visitStatements(that.getForClause().getBlock().getStatements()); //If there's an else block, check for normal termination endBlock(); if (hasElse) { endLine(); out("if (", clAlias, "getFinished() === ", itemVar, ")"); encloseBlockInFunction(that.getElseClause().getBlock()); } } /** Generates code for the beginning of a "for" loop, returning the name of the variable used for the item. 
*/ private String generateForLoop(ForIterator that) { SpecifierExpression iterable = that.getSpecifierExpression(); final String iterVar = names.createTempVariable("it"); final String itemVar; if (that instanceof ValueIterator) { itemVar = names.name(((ValueIterator)that).getVariable().getDeclarationModel()); } else { itemVar = names.createTempVariable("item"); } out("var ", iterVar, " = "); iterable.visit(this); out(".getIterator();"); endLine(); out("var ", itemVar, ";while ((", itemVar, "=", iterVar, ".next())!==", clAlias, "getFinished())"); beginBlock(); if (that instanceof ValueIterator) { directAccess.add(((ValueIterator)that).getVariable().getDeclarationModel()); } else if (that instanceof KeyValueIterator) { String keyvar = names.name(((KeyValueIterator)that).getKeyVariable().getDeclarationModel()); String valvar = names.name(((KeyValueIterator)that).getValueVariable().getDeclarationModel()); out("var ", keyvar, "=", itemVar, ".getKey();"); endLine(); out("var ", valvar, "=", itemVar, ".getItem();"); directAccess.add(((KeyValueIterator)that).getKeyVariable().getDeclarationModel()); directAccess.add(((KeyValueIterator)that).getValueVariable().getDeclarationModel()); endLine(); } return itemVar; } public void visit(InOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.right(); out(".contains("); termgen.left(); out(")"); } }); } @Override public void visit(TryCatchStatement that) { out("try"); encloseBlockInFunction(that.getTryClause().getBlock()); if (!that.getCatchClauses().isEmpty()) { String catchVarName = names.createTempVariable("ex"); out("catch(", catchVarName, ")"); beginBlock(); boolean firstCatch = true; for (CatchClause catchClause : that.getCatchClauses()) { Variable variable = catchClause.getCatchVariable().getVariable(); if (!firstCatch) { out("else "); } firstCatch = false; out("if("); generateIsOfType(null, catchVarName, variable.getType(), null, false); out(")"); if 
(catchClause.getBlock().getStatements().isEmpty()) { out("{}"); } else { beginBlock(); directAccess.add(variable.getDeclarationModel()); names.forceName(variable.getDeclarationModel(), catchVarName); visitStatements(catchClause.getBlock().getStatements()); endBlockNewLine(); } } out("else{throw ", catchVarName, "}"); endBlockNewLine(); } if (that.getFinallyClause() != null) { out("finally"); encloseBlockInFunction(that.getFinallyClause().getBlock()); } } @Override public void visit(Throw that) { out("throw "); if (that.getExpression() != null) { that.getExpression().visit(this); } else { out(clAlias, "Exception()"); } out(";"); } private void visitIndex(IndexExpression that) { that.getPrimary().visit(this); ElementOrRange eor = that.getElementOrRange(); if (eor instanceof Element) { out(".item("); ((Element)eor).getExpression().visit(this); out(")"); } else {//range, or spread? ElementRange er = (ElementRange)eor; Expression sexpr = er.getLength(); if (sexpr == null) { if (er.getLowerBound() == null) { out(".spanTo("); } else if (er.getUpperBound() == null) { out(".spanFrom("); } else { out(".span("); } } else { out(".segment("); } if (er.getLowerBound() != null) { er.getLowerBound().visit(this); if (er.getUpperBound() != null || sexpr != null) { out(","); } } if (er.getUpperBound() != null) { er.getUpperBound().visit(this); } else if (sexpr != null) { sexpr.visit(this); } out(")"); } } public void visit(IndexExpression that) { visitIndex(that); } /** Generates code for a case clause, as part of a switch statement. Each case * is rendered as an if. 
*/ private void caseClause(CaseClause cc, String expvar, Term switchTerm) { out("if ("); final CaseItem item = cc.getCaseItem(); if (item instanceof IsCase) { IsCase isCaseItem = (IsCase) item; generateIsOfType(null, expvar, isCaseItem.getType(), null, false); Variable caseVar = isCaseItem.getVariable(); if (caseVar != null) { directAccess.add(caseVar.getDeclarationModel()); names.forceName(caseVar.getDeclarationModel(), expvar); } } else if (item instanceof SatisfiesCase) { item.addError("case(satisfies) not yet supported"); out("true"); } else if (item instanceof MatchCase){ boolean first = true; for (Expression exp : ((MatchCase)item).getExpressionList().getExpressions()) { if (!first) out(" || "); out(expvar, "==="); //TODO equality? /*out(".equals(");*/ exp.visit(this); //out(")==="); clAlias(); out("getTrue()"); first = false; } } else { cc.addUnexpectedError("support for case of type " + cc.getClass().getSimpleName() + " not yet implemented"); } out(") "); encloseBlockInFunction(cc.getBlock()); } @Override public void visit(SwitchStatement that) { if (comment) out("//Switch statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")"); endLine(); //Put the expression in a tmp var final String expvar = names.createTempVariable("switch"); out("var ", expvar, "="); Expression expr = that.getSwitchClause().getExpression(); expr.visit(this); endLine(true); //For each case, do an if boolean first = true; for (CaseClause cc : that.getSwitchCaseList().getCaseClauses()) { if (!first) out("else "); caseClause(cc, expvar, expr.getTerm()); first = false; } if (that.getSwitchCaseList().getElseClause() != null) { out("else "); that.getSwitchCaseList().getElseClause().visit(this); } if (comment) { out("//End switch statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")"); endLine(); } } /** Generates the code for an anonymous function defined inside an argument list. 
*/ @Override public void visit(final FunctionArgument that) { singleExprFunction(that.getParameterLists(), that.getExpression(), that.getScope()); } private void singleExprFunction(final List<ParameterList> paramLists, final Expression expr, final Scope scope) { generateParameterLists(paramLists, scope, new ParameterListCallback() { @Override public void completeFunction() { beginBlock(); if (paramLists.size() == 1) { initSelf(scope); } initParameters(paramLists.get(paramLists.size()-1), null); out("return "); expr.visit(GenerateJsVisitor.this); out(";"); endBlock(); } }); } /** Generates the code for a function in a named argument list. */ @Override public void visit(final MethodArgument that) { generateParameterLists(that.getParameterLists(), that.getScope(), new ParameterListCallback() { @Override public void completeFunction() { Block block = that.getBlock(); SpecifierExpression specExpr = that.getSpecifierExpression(); if (specExpr != null) { out("{return "); specExpr.getExpression().visit(GenerateJsVisitor.this); out(";}"); } else if (block != null) { block.visit(GenerateJsVisitor.this); } } }); } @Override public void visit(SegmentOp that) { String rhs = names.createTempVariable(); out("(function(){var ", rhs, "="); that.getRightTerm().visit(this); endLine(true); out("if (", rhs, ">0){"); endLine(); String lhs = names.createTempVariable(); String end = names.createTempVariable(); out("var ", lhs, "="); that.getLeftTerm().visit(this); endLine(true); out("var ", end, "=", lhs); endLine(true); out("for (var i=1; i<", rhs, "; i++){", end, "=", end, ".getSuccessor();}"); endLine(); out("return ", clAlias, "Range("); out(lhs, ",", end, ")"); endLine(); out("}else return ", clAlias, "empty;}())"); } /** Generates the code for single or multiple parameter lists, with a callback function to generate the function blocks. 
*/ private void generateParameterLists(List<ParameterList> plist, Scope scope, ParameterListCallback callback) { if (plist.size() == 1) { out(function); ParameterList paramList = plist.get(0); paramList.visit(this); callback.completeFunction(); } else { int count=0; for (ParameterList paramList : plist) { if (count==0) { out(function); } else { out("return function"); } paramList.visit(this); if (count == 0) { beginBlock(); initSelf(scope); initParameters(paramList, null); } else { out("{"); } count++; } callback.completeFunction(); for (int i=0; i < count; i++) { endBlock(false, i==count-1); } } } /** Encloses the block in a function, IF NEEDED. */ void encloseBlockInFunction(Block block) { boolean wrap=encloser.encloseBlock(block); if (wrap) { beginBlock(); Continuation c = new Continuation(block.getScope(), names); continues.push(c); out("var ", c.getContinueName(), "=false;"); endLine(); out("var ", c.getBreakName(), "=false;"); endLine(); out("var ", c.getReturnName(), "=(function()"); } block.visit(this); if (wrap) { Continuation c = continues.pop(); out("());if(", c.getReturnName(), "!==undefined){return ", c.getReturnName(), ";}"); if (c.isContinued()) { out("else if(", c.getContinueName(),"===true){continue;}"); } if (c.isBreaked()) { out("else if (", c.getBreakName(),"===true){break;}"); } endBlockNewLine(); } } private static class Continuation { private final String cvar; private final String rvar; private final String bvar; private final Scope scope; private boolean cused, bused; public Continuation(Scope scope, JsIdentifierNames names) { this.scope=scope; cvar = names.createTempVariable("cntvar"); rvar = names.createTempVariable("retvar"); bvar = names.createTempVariable("brkvar"); } public Scope getScope() { return scope; } public String getContinueName() { return cvar; } public String getBreakName() { return bvar; } public String getReturnName() { return rvar; } public void useContinue() { cused = true; } public void useBreak() { bused=true; } 
public boolean isContinued() { return cused; } public boolean isBreaked() { return bused; } //"isBroken" sounds really really bad in this case } private static interface ParameterListCallback { void completeFunction(); } /** This interface is used inside type initialization method. */ private interface PrototypeInitCallback { void addToPrototypeCallback(); } @Override public void visit(Tuple that) { int count = 0; SequencedArgument sarg = that.getSequencedArgument(); if (sarg == null) { out(clAlias, "empty"); } else { List<List<ProducedType>> targs = new ArrayList<List<ProducedType>>(); List<PositionalArgument> positionalArguments = sarg.getPositionalArguments(); boolean spread = !positionalArguments.isEmpty() && positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument == false; int lim = positionalArguments.size()-1; for (PositionalArgument expr : positionalArguments) { if (count > 0) { out(","); } if (count==lim && spread) { if (expr.getTypeModel().getDeclaration().inherits(types.tuple)) { expr.visit(this); } else { expr.visit(this); out(".getSequence()"); } } else { out(clAlias, "Tuple("); if (count > 0) { targs.add(0, targs.get(0).get(2).getTypeArgumentList()); } else { targs.add(that.getTypeModel().getTypeArgumentList()); } expr.visit(this); } count++; } if (!spread) { if (count > 0) { out(","); } out(clAlias, "empty"); } else { count--; } for (List<ProducedType> t : targs) { out(","); TypeUtils.printTypeArguments(that, t, this); out(")"); } } } @Override public void visit(Assertion that) { out("//assert"); location(that); String custom = "Assertion failed"; //Scan for a "doc" annotation with custom message for (Annotation ann : that.getAnnotationList().getAnnotations()) { BaseMemberExpression bme = (BaseMemberExpression)ann.getPrimary(); if ("doc".equals(bme.getDeclaration().getName())) { custom = ((Tree.ListedArgument)ann.getPositionalArgumentList().getPositionalArguments().get(0)).getExpression().getTerm().getText(); //unquote 
custom = custom.substring(1, custom.length() - 1); } } endLine(); StringBuilder sb = new StringBuilder(custom).append(": '"); for (int i = that.getConditionList().getToken().getTokenIndex()+1; i < that.getConditionList().getEndToken().getTokenIndex(); i++) { sb.append(tokens.get(i).getText()); } sb.append("' at ").append(that.getUnit().getFilename()).append(" (").append( that.getConditionList().getLocation()).append(")"); conds.specialConditionsAndBlock(that.getConditionList(), null, "if (!"); //escape custom = escapeStringLiteral(sb.toString()); out(") { throw ", clAlias, "Exception('", custom, "'); }"); endLine(); } void closeSequenceWithReifiedType(Node that, List<ProducedType> types) { out("].reifyCeylonType("); TypeUtils.printTypeArguments(that, types, this); out(")"); } }
src/main/java/com/redhat/ceylon/compiler/js/GenerateJsVisitor.java
package com.redhat.ceylon.compiler.js; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; import org.antlr.runtime.CommonToken; import com.redhat.ceylon.compiler.typechecker.analyzer.AnalysisWarning; import com.redhat.ceylon.compiler.typechecker.model.Class; import com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface; import com.redhat.ceylon.compiler.typechecker.model.Declaration; import com.redhat.ceylon.compiler.typechecker.model.Functional; import com.redhat.ceylon.compiler.typechecker.model.Getter; import com.redhat.ceylon.compiler.typechecker.model.ImportableScope; import com.redhat.ceylon.compiler.typechecker.model.Interface; import com.redhat.ceylon.compiler.typechecker.model.InterfaceAlias; import com.redhat.ceylon.compiler.typechecker.model.Method; import com.redhat.ceylon.compiler.typechecker.model.MethodOrValue; import com.redhat.ceylon.compiler.typechecker.model.Module; import com.redhat.ceylon.compiler.typechecker.model.Package; import com.redhat.ceylon.compiler.typechecker.model.ProducedType; import com.redhat.ceylon.compiler.typechecker.model.Scope; import com.redhat.ceylon.compiler.typechecker.model.Setter; import com.redhat.ceylon.compiler.typechecker.model.Specification; import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration; import com.redhat.ceylon.compiler.typechecker.model.TypeParameter; import com.redhat.ceylon.compiler.typechecker.model.Util; import com.redhat.ceylon.compiler.typechecker.model.Value; import com.redhat.ceylon.compiler.typechecker.tree.*; import com.redhat.ceylon.compiler.typechecker.tree.Tree.PositionalArgument; import com.redhat.ceylon.compiler.typechecker.tree.Tree.*; public class GenerateJsVisitor extends Visitor implements NaturalVisitor { private boolean indent=true; private boolean comment=true; private boolean verbose=false; 
private final Stack<Continuation> continues = new Stack<Continuation>(); private final EnclosingFunctionVisitor encloser = new EnclosingFunctionVisitor(); private final JsIdentifierNames names; private final Set<Declaration> directAccess = new HashSet<Declaration>(); private final RetainedVars retainedVars = new RetainedVars(); private final Map<String, String> importedModules; final ConditionGenerator conds; private final InvocationGenerator invoker; private final List<CommonToken> tokens; private final class SuperVisitor extends Visitor { private final List<Declaration> decs; private SuperVisitor(List<Declaration> decs) { this.decs = decs; } @Override public void visit(QualifiedMemberOrTypeExpression qe) { if (qe.getPrimary() instanceof Super) { decs.add(qe.getDeclaration()); } super.visit(qe); } @Override public void visit(BaseMemberOrTypeExpression that) { if (that.getSupertypeQualifier() != null) { decs.add(that.getDeclaration()); } super.visit(that); } @Override public void visit(QualifiedType that) { if (that.getOuterType() instanceof SuperType) { decs.add(that.getDeclarationModel()); } super.visit(that); } public void visit(Tree.ClassOrInterface qe) { //don't recurse if (qe instanceof ClassDefinition) { ExtendedType extType = ((ClassDefinition) qe).getExtendedType(); if (extType != null) { super.visit(extType); } } } } private final class OuterVisitor extends Visitor { boolean found = false; private Declaration dec; private OuterVisitor(Declaration dec) { this.dec = dec; } @Override public void visit(QualifiedMemberOrTypeExpression qe) { if (qe.getPrimary() instanceof Outer || qe.getPrimary() instanceof This) { if ( qe.getDeclaration().equals(dec) ) { found = true; } } super.visit(qe); } } private final TypeUtils types; private final Writer out; private final boolean prototypeStyle; private CompilationUnit root; private static String clAlias=""; private static final String function="function "; private boolean needIndent = true; private int indentLevel = 0; 
private static void setCLAlias(String alias) {
    clAlias = alias + ".";
}
/** Returns the module name for the language module. */
static String getClAlias() { return clAlias; }

@Override
public void handleException(Exception e, Node that) {
    that.addUnexpectedError(that.getMessage(e, this));
}

public GenerateJsVisitor(Writer out, boolean prototypeStyle, JsIdentifierNames names,
        List<CommonToken> tokens, Map<String,String> imports, TypeUtils typeUtils) {
    this.out = out;
    this.prototypeStyle=prototypeStyle;
    this.names = names;
    conds = new ConditionGenerator(this, names, directAccess);
    this.tokens = tokens;
    importedModules = imports;
    types = typeUtils;
    invoker = new InvocationGenerator(this, names, retainedVars);
}

TypeUtils getTypeUtils() { return types; }

/** Tells the receiver whether to add comments to certain declarations. Default is true. */
public void setAddComments(boolean flag) { comment = flag; }
public boolean isAddComments() { return comment; }

/** Tells the receiver whether to indent the generated code. Default is true. */
public void setIndent(boolean flag) { indent = flag; }

/** Tells the receiver to be verbose (prints generated code to STDOUT in addition to writer) */
public void setVerbose(boolean flag) { verbose = flag; }

/** Returns the helper component to handle naming. */
JsIdentifierNames getNames() { return names; }

/** Print generated code to the Writer specified at creation time.
 * Automatically prints indentation first if necessary.
 * @param code The main code
 * @param codez Optional additional strings to print after the main code. */
void out(String code, String... codez) {
    try {
        if (indent && needIndent) {
            //Emit pending indentation before the first token of the line.
            for (int i=0;i<indentLevel;i++) {
                out.write(" ");
            }
        }
        needIndent = false;
        out.write(code);
        for (String s : codez) {
            out.write(s);
        }
        if (verbose) {
            //Mirror the generated code to stdout for debugging.
            System.out.print(code);
            for (String s : codez) {
                System.out.print(s);
            }
        }
    } catch (IOException ioe) {
        throw new RuntimeException("Generating JS code", ioe);
    }
}

/** Prints a newline. Indentation will automatically be printed by
 * {@link #out(String, String...)} when the next line is started. */
void endLine() { endLine(false); }
/** Prints a newline. Indentation will automatically be printed by
 * {@link #out(String, String...)} when the next line is started.
 * @param semicolon if <code>true</code> then a semicolon is printed at the end
 * of the previous line*/
void endLine(boolean semicolon) {
    if (semicolon) { out(";"); }
    out("\n");
    needIndent = true;
}
/** Calls {@link #endLine()} if the current position is not already the beginning
 * of a line. */
void beginNewLine() {
    if (!needIndent) { endLine(); }
}

/** Increases indentation level, prints opening brace and newline. Indentation will
 * automatically be printed by {@link #out(String, String...)} when the next line is started. */
void beginBlock() {
    indentLevel++;
    out("{");
    endLine();
}

/** Decreases indentation level, prints a closing brace in new line (using
 * {@link #beginNewLine()}) and calls {@link #endLine()}. */
void endBlockNewLine() {
    endBlock(false, true);
}
/** Decreases indentation level, prints a closing brace in new line (using
 * {@link #beginNewLine()}) and calls {@link #endLine()}.
 * @param semicolon if <code>true</code> then prints a semicolon after the brace*/
void endBlockNewLine(boolean semicolon) {
    endBlock(semicolon, true);
}
/** Decreases indentation level and prints a closing brace in new line (using
 * {@link #beginNewLine()}). */
void endBlock() {
    endBlock(false, false);
}
/** Decreases indentation level and prints a closing brace in new line (using
 * {@link #beginNewLine()}).
 * @param semicolon if <code>true</code> then prints a semicolon after the brace
 * @param newline if <code>true</code> then additionally calls {@link #endLine()} */
void endBlock(boolean semicolon, boolean newline) {
    indentLevel--;
    beginNewLine();
    out(semicolon ?
"};" : "}");
    if (newline) { endLine(); }
}

/** Prints source code location in the form "at [filename] ([location])" */
void location(Node node) {
    out(" at ", node.getUnit().getFilename(), " (", node.getLocation(), ")");
}

@Override
public void visit(CompilationUnit that) {
    root = that;
    Module clm = that.getUnit().getPackage().getModule()
            .getLanguageModule();
    if (!JsCompiler.compilingLanguageModule) {
        //Every compiled unit depends on the language module.
        require(clm);
        setCLAlias(names.moduleAlias(clm));
    }
    for (CompilerAnnotation ca: that.getCompilerAnnotations()) {
        ca.visit(this);
    }
    if (that.getImportList() != null) {
        that.getImportList().visit(this);
    }
    visitStatements(that.getDeclarations());
}

public void visit(Import that) {
    ImportableScope scope =
            that.getImportMemberOrTypeList().getImportList().getImportedScope();
    if (scope instanceof Package) {
        require(((Package) scope).getModule());
    }
}

/** Emits a require() for the given module unless one was already emitted. */
private void require(Module mod) {
    final String path = scriptPath(mod);
    final String modAlias = names.moduleAlias(mod);
    if (importedModules.put(path, modAlias) == null) {
        out("var ", modAlias, "=require('", path, "');");
        endLine();
    }
}

/** Builds the require() path for a module: name/version/name-version
 * (the version parts are omitted for the default module). */
private String scriptPath(Module mod) {
    StringBuilder path = new StringBuilder(mod.getNameAsString().replace('.', '/')).append('/');
    if (!mod.isDefault()) {
        path.append(mod.getVersion()).append('/');
    }
    path.append(mod.getNameAsString());
    if (!mod.isDefault()) {
        path.append('-').append(mod.getVersion());
    }
    return path.toString();
}

@Override
public void visit(Parameter that) {
    out(names.name(that.getDeclarationModel()));
}

@Override
public void visit(ParameterList that) {
    out("(");
    boolean first=true;
    boolean ptypes = false;
    for (Parameter param: that.getParameters()) {
        if (!first) out(",");
        com.redhat.ceylon.compiler.typechecker.model.Parameter d = param.getDeclarationModel();
        //Append a hidden "$$$mptypes" argument when a method type parameter
        //occurs in any parameter type.
        if (!ptypes && d.getScope() instanceof Method) {
            List<TypeParameter> tparms = ((Method)d.getScope()).getTypeParameters();
            if (tparms != null && !tparms.isEmpty()) {
                for (TypeParameter tp : ((Method)d.getScope()).getTypeParameters()) {
                    ptypes |= TypeUtils.typeContainsTypeParameter(d.getType(), tp) != null;
                }
            }
        }
        out(names.name(d));
        first = false;
    }
    if (ptypes) {
        if (!first) out(",");
        out("$$$mptypes");
    }
    out(")");
}

/** Visits each statement, emitting any retained variables after each one. */
private void visitStatements(List<? extends Statement> statements) {
    List<String> oldRetainedVars = retainedVars.reset(null);
    for (int i=0; i<statements.size(); i++) {
        Statement s = statements.get(i);
        s.visit(this);
        beginNewLine();
        retainedVars.emitRetainedVars(this);
    }
    retainedVars.reset(oldRetainedVars);
}

@Override
public void visit(Body that) {
    visitStatements(that.getStatements());
}

@Override
public void visit(Block that) {
    List<Statement> stmnts = that.getStatements();
    if (stmnts.isEmpty()) {
        out("{}");
    }
    else {
        beginBlock();
        initSelf(that);
        visitStatements(stmnts);
        endBlock();
    }
}

private void initSelf(Block block) {
    initSelf(block.getScope());
}
/** Inside a prototype-style member, captures 'this' into the self variable. */
private void initSelf(Scope scope) {
    if ((prototypeOwner != null) &&
            ((scope instanceof MethodOrValue)
                || (scope instanceof TypeDeclaration)
                || (scope instanceof Specification))) {
        out("var ");
        self(prototypeOwner);
        out("=this;");
        endLine();
    }
}

/** Emits a source-location comment for a declaration (when comments are enabled). */
private void comment(Tree.Declaration that) {
    if (!comment) return;
    endLine();
    out("//", that.getNodeType(), " ", that.getDeclarationModel().getName());
    location(that);
    endLine();
}

private void var(Declaration d) {
    out("var ", names.name(d), "=");
}

private boolean share(Declaration d) {
    return share(d, true);
}

/** Exports a captured declaration on the enclosing self/exports object.
 * @return true if the declaration was shared */
private boolean share(Declaration d, boolean excludeProtoMembers) {
    boolean shared = false;
    if (!(excludeProtoMembers && prototypeStyle && d.isClassOrInterfaceMember())
            && isCaptured(d)) {
        beginNewLine();
        outerSelf(d);
        out(".", names.name(d), "=", names.name(d), ";");
        endLine();
        shared = true;
    }
    return shared;
}

@Override
public void visit(ClassDeclaration that) {
    //Don't even bother with nodes that have errors
    if (that.getErrors() != null && !that.getErrors().isEmpty()) {
        //But warnings are ok
        for (Message err : that.getErrors()) {
            if
(!(err instanceof AnalysisWarning)) {
                return;
            }
        }
    }
    Class d = that.getDeclarationModel();
    if (prototypeStyle && d.isClassOrInterfaceMember()) return;
    comment(that);
    //A class declaration is an alias: emit a function that delegates to the
    //aliased class's constructor.
    out(function, names.name(d), "(");
    //Generate each parameter because we need to append one at the end
    for (Parameter p: that.getParameterList().getParameters()) {
        p.visit(this);
        out(", ");
    }
    self(d);
    out(")");
    ExtendedType ext = that.getExtendedType();
    TypeDeclaration aliased = ext.getType().getDeclarationModel();
    out("{return ");
    qualify(ext.getType(), aliased);
    out(names.name(aliased), "(");
    if (ext.getInvocationExpression().getPositionalArgumentList() != null) {
        ext.getInvocationExpression().getPositionalArgumentList().visit(this);
        if (!ext.getInvocationExpression().getPositionalArgumentList().getPositionalArguments().isEmpty()) {
            out(",");
        }
    } else {
        out("/*PENDIENTE*/");
    }
    self(d);
    out(");}");
    endLine();
    //The alias shares the aliased class's prototype object.
    out(names.name(d), ".$$=");
    qualify(ext, aliased);
    out(names.name(aliased), ".$$;");
    endLine();
    share(d);
}

private void addClassDeclarationToPrototype(TypeDeclaration outer, ClassDeclaration that) {
    comment(that);
    TypeDeclaration dec = that.getExtendedType().getType().getTypeModel().getDeclaration();
    String path = qualifiedPath(that, dec, true);
    if (path.length() > 0) {
        path += '.';
    }
    out(names.self(outer), ".", names.name(that.getDeclarationModel()), "=",
            path, names.name(dec), ";");
    endLine();
}

@Override
public void visit(InterfaceDeclaration that) {
    //Don't even bother with nodes that have errors
    if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
    Interface d = that.getDeclarationModel();
    if (prototypeStyle && d.isClassOrInterfaceMember()) return;
    //It's pointless declaring interface aliases outside of classes/interfaces
    Scope scope = that.getScope();
    if (scope instanceof InterfaceAlias) {
        scope = scope.getContainer();
        if (!(scope instanceof ClassOrInterface)) return;
    }
    comment(that);
    var(d);
    TypeDeclaration dec = that.getTypeSpecifier().getType().getTypeModel()
            .getDeclaration();
    qualify(that,dec);
    out(names.name(dec), ";");
    endLine();
    share(d);
}

private void addInterfaceDeclarationToPrototype(TypeDeclaration outer, InterfaceDeclaration that) {
    comment(that);
    TypeDeclaration dec = that.getTypeSpecifier().getType().getTypeModel().getDeclaration();
    String path = qualifiedPath(that, dec, true);
    if (path.length() > 0) {
        path += '.';
    }
    out(names.self(outer), ".", names.name(that.getDeclarationModel()), "=",
            path, names.name(dec), ";");
    endLine();
}

private void addInterfaceToPrototype(ClassOrInterface type, InterfaceDefinition interfaceDef) {
    interfaceDefinition(interfaceDef);
    Interface d = interfaceDef.getDeclarationModel();
    out(names.self(type), ".", names.name(d), "=", names.name(d), ";");
    endLine();
}

@Override
public void visit(InterfaceDefinition that) {
    //Don't even bother with nodes that have errors
    if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
    if (!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) {
        interfaceDefinition(that);
    }
}

/** Emits the constructor function for an interface definition. */
private void interfaceDefinition(InterfaceDefinition that) {
    Interface d = that.getDeclarationModel();
    comment(that);
    out(function, names.name(d), "(");
    self(d);
    out(")");
    beginBlock();
    //declareSelf(d);
    referenceOuter(d);
    final List<Declaration> superDecs = new ArrayList<Declaration>();
    if (!prototypeStyle) {
        new SuperVisitor(superDecs).visit(that.getInterfaceBody());
    }
    callInterfaces(that.getSatisfiedTypes(), d, that, superDecs);
    that.getInterfaceBody().visit(this);
    //returnSelf(d);
    endBlockNewLine();
    share(d);
    typeInitialization(that);
}

private void addClassToPrototype(ClassOrInterface type, ClassDefinition classDef) {
    classDefinition(classDef);
    Class d = classDef.getDeclarationModel();
    out(names.self(type), ".", names.name(d), "=", names.name(d), ";");
    endLine();
}

@Override
public void visit(ClassDefinition that) {
    //Don't even bother with nodes that have errors
    if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
    if
(!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) {
        classDefinition(that);
    }
}

/** Emits the constructor function for a class definition, including super
 * and interface initialization and the type-init function. */
private void classDefinition(ClassDefinition that) {
    Class d = that.getDeclarationModel();
    comment(that);
    out(function, names.name(d), "(");
    for (Parameter p: that.getParameterList().getParameters()) {
        p.visit(this);
        out(", ");
    }
    boolean withTargs = that.getTypeParameterList() != null &&
            !that.getTypeParameterList().getTypeParameterDeclarations().isEmpty();
    if (withTargs) {
        //Reified type arguments are passed as an extra "$$targs$$" argument.
        out("$$targs$$,");
    }
    self(d);
    out(")");
    beginBlock();
    //This takes care of top-level attributes defined before the class definition
    out("$init$", names.name(d), "();");
    endLine();
    declareSelf(d);
    if (withTargs) {
        self(d);
        out(".$$targs$$=$$targs$$;");
        endLine();
    } else {
        //Check if any of the satisfied types have type arguments
        if (that.getSatisfiedTypes() != null) {
            for(Tree.StaticType sat : that.getSatisfiedTypes().getTypes()) {
                //NOTE(review): 'first' is declared inside the loop so the else
                //branch below is unreachable; it looks like it was meant to be
                //declared before the loop — confirm before changing.
                boolean first = true;
                List<ProducedType> targs = sat.getTypeModel().getTypeArgumentList();
                if (targs != null && !targs.isEmpty()) {
                    if (first) {
                        self(d);
                        out(".$$targs$$=");
                        TypeUtils.printTypeArguments(that, targs, this);
                        endLine(true);
                    } else {
                        out("/*TODO: more type arguments*/");
                        endLine();
                    }
                }
            }
        }
    }
    referenceOuter(d);
    initParameters(that.getParameterList(), d);
    final List<Declaration> superDecs = new ArrayList<Declaration>();
    if (!prototypeStyle) {
        new SuperVisitor(superDecs).visit(that.getClassBody());
    }
    callSuperclass(that.getExtendedType(), d, that, superDecs);
    callInterfaces(that.getSatisfiedTypes(), d, that, superDecs);
    that.getClassBody().visit(this);
    returnSelf(d);
    endBlockNewLine();
    share(d);
    typeInitialization(that);
}

/** In prototype style, stores a reference to the enclosing instance. */
private void referenceOuter(TypeDeclaration d) {
    if (prototypeStyle && d.isClassOrInterfaceMember()) {
        self(d);
        out(".");
        outerSelf(d);
        out("=this;");
        endLine();
    }
}

/** Copies members referenced through 'super' into the subtype, under a
 * scope-suffixed name (closure style only). */
private void copySuperMembers(TypeDeclaration typeDecl, final List<Declaration> decs,
        ClassOrInterface d) {
    if (!prototypeStyle) {
        for (Declaration dec: decs) {
            if (!typeDecl.isMember(dec)) { continue; }
            String suffix = names.scopeSuffix(dec.getContainer());
            if (dec instanceof Value) {
                superGetterRef(dec,d,suffix);
                if (((Value) dec).isVariable()) {
                    superSetterRef(dec,d,suffix);
                }
            }
            else if (dec instanceof Getter) {
                superGetterRef(dec,d,suffix);
                if (((Getter) dec).isVariable()) {
                    superSetterRef(dec,d,suffix);
                }
            }
            else {
                superRef(dec,d,suffix);
            }
        }
    }
}

/** Emits the call to the superclass constructor, passing along arguments,
 * reified type arguments (if any) and the self object. */
private void callSuperclass(ExtendedType extendedType, Class d, Node that,
        final List<Declaration> superDecs) {
    if (extendedType!=null) {
        TypeDeclaration typeDecl = extendedType.getType().getDeclarationModel();
        List<PositionalArgument> argList = extendedType.getInvocationExpression()
                .getPositionalArgumentList().getPositionalArguments();
        qualify(that, typeDecl);
        out(memberAccessBase(extendedType.getType(), names.name(typeDecl), false),
                (prototypeStyle && (getSuperMemberScope(extendedType.getType()) != null))
                    ? ".call(this," : "(");
        for (PositionalArgument arg: argList) {
            arg.visit(this);
            out(",");
        }
        //If the supertype has type arguments, add them to the call
        if (typeDecl.getTypeParameters() != null && !typeDecl.getTypeParameters().isEmpty()) {
            TypeUtils.printTypeArguments(that, extendedType.getType().getTypeArgumentList().getTypeModels(), this);
            out(",");
        }
        self(d);
        out(");");
        endLine();
        copySuperMembers(typeDecl, superDecs, d);
    }
}

/** Emits a call to each satisfied interface's constructor on the self object. */
private void callInterfaces(SatisfiedTypes satisfiedTypes, ClassOrInterface d, Node that,
        final List<Declaration> superDecs) {
    if (satisfiedTypes!=null) {
        for (StaticType st: satisfiedTypes.getTypes()) {
            TypeDeclaration typeDecl = st.getTypeModel().getDeclaration();
            if (typeDecl.isAlias()) {
                typeDecl = typeDecl.getExtendedTypeDeclaration();
            }
            qualify(that, typeDecl);
            out(names.name((ClassOrInterface)typeDecl), "(");
            self(d);
            out(");");
            endLine();
            copySuperMembers(typeDecl, superDecs, d);
        }
    }
}

/** Generates a function to initialize the specified type.
 */
private void typeInitialization(final Tree.Declaration type) {
    ExtendedType extendedType = null;
    SatisfiedTypes satisfiedTypes = null;
    boolean isInterface = false;
    ClassOrInterface decl = null;
    //Extract the extended/satisfied types from whichever declaration kind this is.
    if (type instanceof ClassDefinition) {
        ClassDefinition classDef = (ClassDefinition) type;
        extendedType = classDef.getExtendedType();
        satisfiedTypes = classDef.getSatisfiedTypes();
        decl = classDef.getDeclarationModel();
    } else if (type instanceof InterfaceDefinition) {
        satisfiedTypes = ((InterfaceDefinition) type).getSatisfiedTypes();
        isInterface = true;
        decl = ((InterfaceDefinition) type).getDeclarationModel();
    } else if (type instanceof ObjectDefinition) {
        ObjectDefinition objectDef = (ObjectDefinition) type;
        extendedType = objectDef.getExtendedType();
        satisfiedTypes = objectDef.getSatisfiedTypes();
        decl = (ClassOrInterface)objectDef.getDeclarationModel().getTypeDeclaration();
    }
    //Callback that adds the type's members to its prototype (prototype style).
    final PrototypeInitCallback callback = new PrototypeInitCallback() {
        @Override
        public void addToPrototypeCallback() {
            if (type instanceof ClassDefinition) {
                addToPrototype(((ClassDefinition)type).getDeclarationModel(),
                        ((ClassDefinition)type).getClassBody().getStatements());
            } else if (type instanceof InterfaceDefinition) {
                addToPrototype(((InterfaceDefinition)type).getDeclarationModel(),
                        ((InterfaceDefinition)type).getInterfaceBody().getStatements());
            }
        }
    };
    typeInitialization(extendedType, satisfiedTypes, isInterface, decl, callback);
}

/** This is now the main method to generate the type initialization code.
 * @param extendedType The type that is being extended.
 * @param satisfiedTypes The types satisfied by the type being initialized.
 * @param isInterface Tells whether the type being initialized is an interface
 * @param d The declaration for the type being initialized
 * @param callback A callback to add something more to the type initializer in prototype style.
 */
private void typeInitialization(ExtendedType extendedType, SatisfiedTypes satisfiedTypes,
        boolean isInterface, ClassOrInterface d, PrototypeInitCallback callback) {
    //Let's always use initTypeProto to avoid #113
    String initFuncName = "initTypeProto";
    out("function $init$", names.name(d), "()");
    beginBlock();
    //Only initialize once: the prototype ($$) is undefined until then.
    out("if (", names.name(d), ".$$===undefined)");
    beginBlock();
    String qns = d.getQualifiedNameString();
    if (JsCompiler.compilingLanguageModule && qns.indexOf("::") < 0) {
        //Language module files get compiled in default module
        //so they need to have this added to their qualified name
        qns = "ceylon.language::" + qns;
    }
    out(clAlias, initFuncName, "(", names.name(d), ",'", qns, "'");
    if (extendedType != null) {
        out(",", typeFunctionName(extendedType.getType(), false));
    } else if (!isInterface) {
        //Classes without an explicit supertype extend Basic.
        out(",", clAlias, "Basic");
    }
    if (satisfiedTypes != null) {
        for (StaticType satType : satisfiedTypes.getTypes()) {
            TypeDeclaration tdec = satType.getTypeModel().getDeclaration();
            if (tdec.isAlias()) {
                tdec = tdec.getExtendedTypeDeclaration();
            }
            String fname = typeFunctionName(satType, true);
            //Actually it could be "if not in same module"
            if (!JsCompiler.compilingLanguageModule && declaredInCL(tdec)) {
                out(",", fname);
            } else {
                //Call the satisfied type's own "$init$" function: inject the
                //prefix on the last path element.
                int idx = fname.lastIndexOf('.');
                if (idx > 0) {
                    fname = fname.substring(0, idx+1) + "$init$" + fname.substring(idx+1);
                } else {
                    fname = "$init$" + fname;
                }
                out(",", fname, "()");
            }
        }
    }
    out(");");
    //The class definition needs to be inside the init function if we want forwards decls to work in prototype style
    if (prototypeStyle) {
        endLine();
        callback.addToPrototypeCallback();
    }
    endBlockNewLine();
    out("return ", names.name(d), ";");
    endBlockNewLine();
    //If it's nested, share the init function
    if (outerSelf(d)) {
        out(".$init$", names.name(d), "=$init$", names.name(d), ";");
        endLine();
    }
    out("$init$", names.name(d), "();");
    endLine();
}

/** Returns the (possibly qualified) JS expression that refers to the given
 * type's constructor function. */
private String typeFunctionName(StaticType type, boolean removeAlias) {
    TypeDeclaration d =
type.getTypeModel().getDeclaration();
    if (removeAlias && d.isAlias()) {
        d = d.getExtendedTypeDeclaration();
    }
    boolean inProto = prototypeStyle
            && (type.getScope().getContainer() instanceof TypeDeclaration);
    String constr = qualifiedPath(type, d, inProto);
    if (constr.length() > 0) {
        constr += '.';
    }
    constr += memberAccessBase(type, names.name(d), false);
    return constr;
}

/** Wraps the given statements in an immediately-invoked closure that
 * receives the type's prototype, so that members are added to it
 * (prototype style only). */
private void addToPrototype(ClassOrInterface d, List<Statement> statements) {
    if (prototypeStyle && !statements.isEmpty()) {
        out("(function(", names.self(d), ")");
        beginBlock();
        for (Statement s: statements) {
            addToPrototype(d, s);
        }
        endBlock();
        out(")(", names.name(d), ".$$.prototype);");
        endLine();
    }
}

//Type whose prototype is currently being populated (null otherwise).
private ClassOrInterface prototypeOwner;

/** Dispatches a single class/interface body statement to the matching
 * add-to-prototype handler, tracking the prototype owner for the duration. */
private void addToPrototype(ClassOrInterface d, Statement s) {
    ClassOrInterface oldPrototypeOwner = prototypeOwner;
    prototypeOwner = d;
    if (s instanceof MethodDefinition) {
        addMethodToPrototype(d, (MethodDefinition)s);
    } else if (s instanceof MethodDeclaration) {
        methodDeclaration(d, (MethodDeclaration) s);
    } else if (s instanceof AttributeGetterDefinition) {
        addGetterToPrototype(d, (AttributeGetterDefinition)s);
    } else if (s instanceof AttributeSetterDefinition) {
        addSetterToPrototype(d, (AttributeSetterDefinition)s);
    } else if (s instanceof AttributeDeclaration) {
        addGetterAndSetterToPrototype(d, (AttributeDeclaration) s);
    } else if (s instanceof ClassDefinition) {
        addClassToPrototype(d, (ClassDefinition) s);
    } else if (s instanceof InterfaceDefinition) {
        addInterfaceToPrototype(d, (InterfaceDefinition) s);
    } else if (s instanceof ObjectDefinition) {
        addObjectToPrototype(d, (ObjectDefinition) s);
    } else if (s instanceof ClassDeclaration) {
        addClassDeclarationToPrototype(d, (ClassDeclaration) s);
    } else if (s instanceof InterfaceDeclaration) {
        addInterfaceDeclarationToPrototype(d, (InterfaceDeclaration) s);
    } else if (s instanceof SpecifierStatement) {
        addSpecifierToPrototype(d, (SpecifierStatement) s);
    }
    prototypeOwner = oldPrototypeOwner;
}

/** Emits "if (self===undefined) self=new Type.$$;" so the self object is
 * only created when not passed in by a subtype. */
private void declareSelf(ClassOrInterface d) {
    out("if (");
    self(d);
    out("===undefined)");
    self(d);
    out("=new ");
    if (prototypeStyle && d.isClassOrInterfaceMember()) {
        out("this.", names.name(d), ".$$;");
    } else {
        out(names.name(d), ".$$;");
    }
    endLine();
    /*out("var ");
    self(d);
    out("=");
    self();
    out(";");
    endLine();*/
}

/** Emits "var self=new Type.$$;" (unconditional instantiation). */
private void instantiateSelf(ClassOrInterface d) {
    out("var ");
    self(d);
    out("=new ");
    if (prototypeStyle && d.isClassOrInterfaceMember()) {
        out("this.", names.name(d), ".$$;");
    } else {
        out(names.name(d), ".$$;");
    }
    endLine();
}

private void returnSelf(ClassOrInterface d) {
    out("return ");
    self(d);
    out(";");
}

private void addObjectToPrototype(ClassOrInterface type, ObjectDefinition objDef) {
    objectDefinition(objDef);
    Value d = objDef.getDeclarationModel();
    Class c = (Class) d.getTypeDeclaration();
    out(names.self(type), ".", names.name(c), "=", names.name(c), ";");
    endLine();
}

@Override
public void visit(ObjectDefinition that) {
    //Don't even bother with nodes that have errors
    if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
    Value d = that.getDeclarationModel();
    if (!(prototypeStyle && d.isClassOrInterfaceMember())) {
        objectDefinition(that);
    } else {
        //Inside a prototype: instantiate the object's class and store it.
        Class c = (Class) d.getTypeDeclaration();
        comment(that);
        outerSelf(d);
        out(".", names.name(d), "=");
        outerSelf(d);
        out(".", names.name(c), "();");
        endLine();
    }
}

/** Emits the anonymous class for an object definition plus the singleton
 * value and its getter. */
private void objectDefinition(ObjectDefinition that) {
    Value d = that.getDeclarationModel();
    boolean addToPrototype = prototypeStyle && d.isClassOrInterfaceMember();
    Class c = (Class) d.getTypeDeclaration();
    comment(that);
    out(function, names.name(c), "()");
    beginBlock();
    instantiateSelf(c);
    referenceOuter(c);
    final List<Declaration> superDecs = new ArrayList<Declaration>();
    if (!prototypeStyle) {
        new SuperVisitor(superDecs).visit(that.getClassBody());
    }
    callSuperclass(that.getExtendedType(), c, that, superDecs);
    callInterfaces(that.getSatisfiedTypes(), c, that, superDecs);
    that.getClassBody().visit(this);
    returnSelf(c);
    indentLevel--;
endLine(); out("}"); endLine(); typeInitialization(that); addToPrototype(c, that.getClassBody().getStatements()); if (!addToPrototype) { out("var ", names.name(d), "=", names.name(c), "(new ", names.name(c), ".$$);"); endLine(); } out("var ", names.getter(d), "=function()"); beginBlock(); out("return "); if (addToPrototype) { out("this."); } out(names.name(d), ";"); endBlockNewLine(); if (addToPrototype || d.isShared()) { outerSelf(d); out(".", names.getter(d), "=", names.getter(d), ";"); endLine(); } } private void superRef(Declaration d, ClassOrInterface sub, String parentSuffix) { //if (d.isActual()) { self(sub); out(".", names.name(d), parentSuffix, "="); self(sub); out(".", names.name(d), ";"); endLine(); //} } private void superGetterRef(Declaration d, ClassOrInterface sub, String parentSuffix) { //if (d.isActual()) { self(sub); out(".", names.getter(d), parentSuffix, "="); self(sub); out(".", names.getter(d), ";"); endLine(); //} } private void superSetterRef(Declaration d, ClassOrInterface sub, String parentSuffix) { //if (d.isActual()) { self(sub); out(".", names.setter(d), parentSuffix, "="); self(sub); out(".", names.setter(d), ";"); endLine(); //} } @Override public void visit(MethodDeclaration that) { //Don't even bother with nodes that have errors if (that.getErrors() != null && !that.getErrors().isEmpty()) return; methodDeclaration(null, that); } private void methodDeclaration(TypeDeclaration outer, MethodDeclaration that) { Method m = that.getDeclarationModel(); if (that.getSpecifierExpression() != null) { // method(params) => expr if (outer == null) { // Not in a prototype definition. Null to do here if it's a // member in prototype style. 
if (prototypeStyle && m.isMember()) { return; } comment(that); out("var "); } else { // prototype definition comment(that); out(names.self(outer), "."); } out(names.name(m), "="); singleExprFunction(that.getParameterLists(), that.getSpecifierExpression().getExpression(), that.getScope()); endLine(true); share(m); } else if (outer == null) { // don't do the following in a prototype definition //Check for refinement of simple param declaration if (m == that.getScope()) { if (m.getContainer() instanceof Class && m.isClassOrInterfaceMember()) { //Declare the method just by pointing to the param function final String name = names.name(((Class)m.getContainer()).getParameter(m.getName())); if (name != null) { self((Class)m.getContainer()); out(".", names.name(m), "=", name, ";"); endLine(); } } else if (m.getContainer() instanceof Method) { //Declare the function just by forcing the name we used in the param list final String name = names.name(((Method)m.getContainer()).getParameter(m.getName())); if (names != null) { names.forceName(m, name); } } } } } @Override public void visit(MethodDefinition that) { //Don't even bother with nodes that have errors if (that.getErrors() != null && !that.getErrors().isEmpty()) return; if (!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) { comment(that); methodDefinition(that); } } private void methodDefinition(MethodDefinition that) { Method d = that.getDeclarationModel(); if (that.getParameterLists().size() == 1) { out(function, names.name(d)); ParameterList paramList = that.getParameterLists().get(0); paramList.visit(this); beginBlock(); initSelf(that.getBlock()); initParameters(paramList, null); visitStatements(that.getBlock().getStatements()); endBlock(); } else { int count=0; for (ParameterList paramList : that.getParameterLists()) { if (count==0) { out(function, names.name(d)); } else { out("return function"); } paramList.visit(this); beginBlock(); initSelf(that.getBlock()); initParameters(paramList, 
null);
            count++;
        }
        visitStatements(that.getBlock().getStatements());
        for (int i=0; i < count; i++) {
            endBlock();
        }
    }
    if (!share(d)) { out(";"); }
}

/** Emits default-value initializers for defaulted/sequenced parameters and
 * stores captured parameters on the type's self object. */
private void initParameters(ParameterList params, TypeDeclaration typeDecl) {
    for (final Parameter param : params.getParameters()) {
        com.redhat.ceylon.compiler.typechecker.model.Parameter pd = param.getDeclarationModel();
        /*if (param instanceof ValueParameterDeclaration && ((ValueParameterDeclaration)param).getDeclarationModel().isHidden()) {
            //TODO support new syntax for class and method parameters
            //the declaration is actually different from the one we usually use
            out("//HIDDEN! ", pd.getName(), "(", names.name(pd), ")");
            endLine();
        }*/
        String paramName = names.name(pd);
        if (param.getDefaultArgument() != null || pd.isSequenced()) {
            out("if(", paramName, "===undefined){", paramName, "=");
            if (param.getDefaultArgument() == null) {
                //Sequenced parameter with no explicit default: empty sequence.
                out(clAlias, "empty");
            } else {
                final SpecifierExpression defaultExpr =
                        param.getDefaultArgument().getSpecifierExpression();
                if ((param instanceof FunctionalParameterDeclaration)
                        && (defaultExpr instanceof LazySpecifierExpression)) {
                    // function parameter defaulted using "=>"
                    singleExprFunction(
                            ((FunctionalParameterDeclaration) param).getParameterLists(),
                            defaultExpr.getExpression(), null);
                } else {
                    defaultExpr.visit(this);
                }
            }
            out(";}");
            endLine();
        }
        if ((typeDecl != null) && pd.isCaptured()) {
            self(typeDecl);
            out(".", paramName, "=", paramName, ";");
            endLine();
        }
    }
}

private void addMethodToPrototype(TypeDeclaration outer,
        MethodDefinition that) {
    Method d = that.getDeclarationModel();
    if (!prototypeStyle||!d.isClassOrInterfaceMember()) return;
    comment(that);
    out(names.self(outer), ".", names.name(d), "=");
    methodDefinition(that);
}

@Override
public void visit(AttributeGetterDefinition that) {
    Getter d = that.getDeclarationModel();
    if (prototypeStyle&&d.isClassOrInterfaceMember()) return;
    comment(that);
    out("var ", names.getter(d), "=function()");
    super.visit(that);
    if (!shareGetter(d)) { out(";"); }
}

private void addGetterToPrototype(TypeDeclaration outer,
        AttributeGetterDefinition that) {
    Getter d = that.getDeclarationModel();
    if (!prototypeStyle||!d.isClassOrInterfaceMember()) return;
    comment(that);
    out(names.self(outer), ".", names.getter(d), "=",
            function, names.getter(d), "()");
    super.visit(that);
    out(";");
}

/** Exports a getter function; useful in non-prototype style. */
private boolean shareGetter(MethodOrValue d) {
    boolean shared = false;
    if (isCaptured(d)) {
        beginNewLine();
        outerSelf(d);
        out(".", names.getter(d), "=", names.getter(d), ";");
        endLine();
        shared = true;
    }
    return shared;
}

@Override
public void visit(AttributeSetterDefinition that) {
    Setter d = that.getDeclarationModel();
    if (prototypeStyle&&d.isClassOrInterfaceMember()) return;
    comment(that);
    out("var ", names.setter(d.getGetter()), "=function(", names.name(d.getParameter()), ")");
    super.visit(that);
    if (!shareSetter(d)) { out(";"); }
}

private void addSetterToPrototype(TypeDeclaration outer,
        AttributeSetterDefinition that) {
    Setter d = that.getDeclarationModel();
    if (!prototypeStyle || !d.isClassOrInterfaceMember()) return;
    comment(that);
    String setterName = names.setter(d.getGetter());
    out(names.self(outer), ".", setterName, "=",
            function, setterName, "(", names.name(d.getParameter()), ")");
    super.visit(that);
    out(";");
}

/** A declaration is captured if it is shared, explicitly captured, or
 * accessed through 'outer'/'this' anywhere in the compilation unit. */
private boolean isCaptured(Declaration d) {
    if (d.isToplevel()||d.isClassOrInterfaceMember()) { //TODO: what about things nested inside control structures
        if (d.isShared() || d.isCaptured() ) {
            return true;
        }
        else {
            OuterVisitor ov = new OuterVisitor(d);
            ov.visit(root);
            return ov.found;
        }
    }
    else {
        return false;
    }
}

/** Exports a setter function when the declaration is captured. */
private boolean shareSetter(MethodOrValue d) {
    boolean shared = false;
    if (isCaptured(d)) {
        beginNewLine();
        outerSelf(d);
        out(".", names.setter(d), "=", names.setter(d), ";");
        endLine();
        shared = true;
    }
    return shared;
}

@Override
public void visit(AttributeDeclaration that) {
    Value d = that.getDeclarationModel();
    //Check if the attribute
//corresponds to a class parameter
    //This is because of the new initializer syntax
    String classParam = null;
    if (d.getContainer() instanceof Functional) {
        classParam = names.name(((Functional)d.getContainer()).getParameter(d.getName()));
    }
    if (!d.isFormal()) {
        comment(that);
        SpecifierOrInitializerExpression specInitExpr =
                that.getSpecifierOrInitializerExpression();
        if (prototypeStyle && d.isClassOrInterfaceMember()) {
            if ((specInitExpr != null)
                    && !(specInitExpr instanceof LazySpecifierExpression)) {
                outerSelf(d);
                out(".", names.name(d), "=");
                super.visit(that);
                endLine(true);
            } else if (classParam != null) {
                //Attribute initialized from a class parameter.
                outerSelf(d);
                out(".", names.name(d), "=", classParam);
                endLine(true);
            }
        }
        else if (specInitExpr instanceof LazySpecifierExpression) {
            //Attribute defined with "=>": emit a getter that evaluates lazily.
            out("var ", names.getter(d), "=function(){return ");
            int boxType = boxStart(specInitExpr.getExpression().getTerm());
            specInitExpr.getExpression().visit(this);
            boxUnboxEnd(boxType);
            out(";}");
            endLine(true);
            shareGetter(d);
        }
        else {
            if ((specInitExpr != null) || (classParam != null) || !d.isMember()
                    || d.isVariable()) {
                generateAttributeGetter(d, specInitExpr, classParam);
            }
            if (d.isVariable()) {
                final String varName = names.name(d);
                String paramVarName = names.createTempVariable(d.getName());
                out("var ", names.setter(d), "=function(", paramVarName, "){return ");
                out(varName, "=", paramVarName, ";};");
                endLine();
                shareSetter(d);
            }
        }
    }
}

/** Emits the variable that holds an attribute's value (initialized from its
 * specifier expression or from a class parameter) plus a getter when needed. */
private void generateAttributeGetter(MethodOrValue decl,
        SpecifierOrInitializerExpression expr, String param) {
    final String varName = names.name(decl);
    out("var ", varName);
    if (expr != null) {
        out("=");
        int boxType = boxStart(expr.getExpression().getTerm());
        expr.visit(this);
        boxUnboxEnd(boxType);
    } else if (param != null) {
        out("=", param);
    }
    endLine(true);
    if (decl instanceof Method) {
        if (decl.isClassOrInterfaceMember() && isCaptured(decl)) {
            beginNewLine();
            outerSelf(decl);
            out(".", names.name(decl), "=", names.name(decl), ";");
            endLine();
        }
    } else {
        if (isCaptured(decl)) {
            out("var ", names.getter(decl),"=function(){return ", varName, ";};");
            endLine();
        } else {
            //Not captured: callers may read the variable directly.
            directAccess.add(decl);
        }
        shareGetter(decl);
    }
}

/** Adds getter (and setter, for variable attributes) functions to the
 * type's prototype. */
private void addGetterAndSetterToPrototype(TypeDeclaration outer,
        AttributeDeclaration that) {
    Value d = that.getDeclarationModel();
    if (!prototypeStyle||d.isToplevel()) return;
    if (!d.isFormal()) {
        comment(that);
        String classParam = null;
        if (d.getContainer() instanceof Functional) {
            classParam = names.name(((Functional)d.getContainer()).getParameter(d.getName()));
        }
        if ((that.getSpecifierOrInitializerExpression() != null) || d.isVariable()
                || (classParam != null)) {
            out(names.self(outer), ".", names.getter(d), "=",
                    function, names.getter(d), "()");
            beginBlock();
            if (that.getSpecifierOrInitializerExpression()
                            instanceof LazySpecifierExpression) {
                // attribute is defined by a lazy expression ("=>" syntax)
                initSelf(that.getScope());
                out("return ");
                Expression expr = that.getSpecifierOrInitializerExpression().getExpression();
                int boxType = boxStart(expr.getTerm());
                expr.visit(this);
                boxUnboxEnd(boxType);
                out(";");
            }
            else {
                out("return this.", names.name(d), ";");
            }
            endBlockNewLine(true);
        }
        if (d.isVariable()) {
            String paramVarName = names.createTempVariable(d.getName());
            out(names.self(outer), ".", names.setter(d), "=");
            out(function, names.setter(d), "(", paramVarName, ")");
            beginBlock();
            out("return this.", names.name(d), "=", paramVarName, ";");
            endBlockNewLine(true);
        }
    }
}

@Override
public void visit(CharLiteral that) {
    out(clAlias, "Character(");
    //Emit the code point of the character between the quotes.
    out(String.valueOf(that.getText().codePointAt(1)));
    out(")");
}

/** Escapes a StringLiteral (needs to be quoted.
*/ String escapeStringLiteral(String s) {
    // NOTE: operates on the raw literal text *including* its surrounding
    // quote characters; the loop deliberately skips index 0 and the last
    // index so the delimiters are never escaped.
    StringBuilder text = new StringBuilder(s);
    //Escape special chars
    for (int i=1; i < text.length()-1;i++) {
        switch(text.charAt(i)) {
        // Each replace() swaps one raw character for a two-character JS
        // escape sequence; the extra i++ steps over the inserted backslash
        // so the escape's second character is not re-examined.
        case 8:text.replace(i, i+1, "\\b"); i++; break;   // backspace
        case 9:text.replace(i, i+1, "\\t"); i++; break;   // tab
        case 10:text.replace(i, i+1, "\\n"); i++; break;  // line feed
        case 12:text.replace(i, i+1, "\\f"); i++; break;  // form feed
        case 13:text.replace(i, i+1, "\\r"); i++; break;  // carriage return
        case 34:text.replace(i, i+1, "\\\""); i++; break; // double quote
        case 39:text.replace(i, i+1, "\\'"); i++; break;  // single quote
        case 92:text.replace(i, i+1, "\\\\"); i++; break; // backslash
        }
    }
    return text.toString();
}

/** Emits a Ceylon String instance for a string literal. The length in code
 * points (excluding the delimiting quotes) is passed along so the runtime
 * does not have to recompute it. */
@Override
public void visit(StringLiteral that) {
    final int slen = that.getText().codePointCount(1, that.getText().length()-1);
    if (JsCompiler.compilingLanguageModule) {
        // Inside the language module itself, call the local String$
        // constructor directly instead of going through the module alias.
        out("String$(", escapeStringLiteral(that.getText()), ",",
            Integer.toString(slen), ")");
    } else {
        out(clAlias, "String(", escapeStringLiteral(that.getText()), ",",
            Integer.toString(slen), ")");
    }
}

/** Emits a string template as a StringBuilder that appends the literal
 * fragments and the getString() of each interpolated expression,
 * alternating literal / expression. */
@Override
public void visit(StringTemplate that) {
    List<StringLiteral> literals = that.getStringLiterals();
    List<Expression> exprs = that.getExpressions();
    out(clAlias, "StringBuilder().appendAll([");
    boolean first = true;
    for (int i = 0; i < literals.size(); i++) {
        StringLiteral literal = literals.get(i);
        // Skip empty literal fragments (text is just the two quote chars).
        if (literal.getText().length() > 2) {
            if (!first) { out(","); }
            first = false;
            literal.visit(this);
        }
        // There is one interpolated expression between consecutive literals.
        if (i < exprs.size()) {
            if (!first) { out(","); }
            first = false;
            exprs.get(i).visit(this);
            out(".getString()");
        }
    }
    out("]).getString()");
}

@Override
public void visit(FloatLiteral that) {
    out(clAlias, "Float(", that.getText(), ")");
}

/** Emits a natural (integer) literal. A '$' prefix marks a binary literal
 * and a '#' prefix a hexadecimal one; those are converted to decimal via
 * BigInteger. Anything else is emitted verbatim (decimal). */
@Override
public void visit(NaturalLiteral that) {
    char prefix = that.getText().charAt(0);
    if (prefix == '$' || prefix == '#') {
        int radix= prefix == '$' ?
2 : 16; try { out("(", new java.math.BigInteger(that.getText().substring(1), radix).toString(), ")"); } catch (NumberFormatException ex) { that.addError("Invalid numeric literal " + that.getText()); } } else { out("(", that.getText(), ")"); } } @Override public void visit(This that) { self(Util.getContainingClassOrInterface(that.getScope())); } @Override public void visit(Super that) { self(Util.getContainingClassOrInterface(that.getScope())); } @Override public void visit(Outer that) { if (prototypeStyle) { Scope scope = that.getScope(); while ((scope != null) && !(scope instanceof TypeDeclaration)) { scope = scope.getContainer(); } if (scope != null) { self((TypeDeclaration) scope); out("."); } } self(that.getTypeModel().getDeclaration()); } @Override public void visit(BaseMemberExpression that) { if (that.getErrors() != null && !that.getErrors().isEmpty()) { //Don't even bother processing a node with errors return; } Declaration decl = that.getDeclaration(); String name = decl.getName(); String pkgName = decl.getUnit().getPackage().getQualifiedNameString(); // map Ceylon true/false/null directly to JS true/false/null if ("ceylon.language".equals(pkgName)) { if ("true".equals(name) || "false".equals(name) || "null".equals(name)) { out(name); return; } } out(memberAccess(that)); } private boolean accessDirectly(Declaration d) { return !accessThroughGetter(d) || directAccess.contains(d); } private boolean accessThroughGetter(Declaration d) { return (d instanceof MethodOrValue) && !(d instanceof Method); } /** Returns true if the top-level declaration for the term is annotated "nativejs" */ private static boolean isNative(Term t) { if (t instanceof MemberOrTypeExpression) { return isNative(((MemberOrTypeExpression)t).getDeclaration()); } return false; } /** Returns true if the declaration is annotated "nativejs" */ private static boolean isNative(Declaration d) { return hasAnnotationByName(getToplevel(d), "nativejs"); } private static Declaration 
getToplevel(Declaration d) { while (d != null && !d.isToplevel()) { Scope s = d.getContainer(); // Skip any non-declaration elements while (s != null && !(s instanceof Declaration)) { s = s.getContainer(); } d = (Declaration) s; } return d; } private static boolean hasAnnotationByName(Declaration d, String name){ if (d != null) { for(com.redhat.ceylon.compiler.typechecker.model.Annotation annotation : d.getAnnotations()){ if(annotation.getName().equals(name)) return true; } } return false; } private void generateSafeOp(QualifiedMemberOrTypeExpression that) { boolean isMethod = that.getDeclaration() instanceof Method; String lhsVar = createRetainedTempVar("opt"); out("(", lhsVar, "="); super.visit(that); out(","); if (isMethod) { out(clAlias, "JsCallable(", lhsVar, ","); } out(lhsVar, "!==null?", lhsVar, ".", memberAccess(that), ":null)"); if (isMethod) { out(")"); } } @Override public void visit(QualifiedMemberExpression that) { //Big TODO: make sure the member is actually // refined by the current class! 
if (that.getMemberOperator() instanceof SafeMemberOp) { generateSafeOp(that); } else if (that.getMemberOperator() instanceof SpreadOp) { generateSpread(that); } else if (that.getDeclaration() instanceof Method && that.getSignature() == null) { //TODO right now this causes that all method invocations are done this way //we need to filter somehow to only use this pattern when the result is supposed to be a callable //looks like checking for signature is a good way (not THE way though; named arg calls don't have signature) generateCallable(that, null); } else { super.visit(that); out(".", memberAccess(that)); } } /** SpreadOp cannot be a simple function call because we need to reference the object methods directly, so it's a function */ private void generateSpread(QualifiedMemberOrTypeExpression that) { //Determine if it's a method or attribute boolean isMethod = that.getDeclaration() instanceof Method; //Define a function out("(function()"); beginBlock(); if (comment) { out("//SpreadOp at ", that.getLocation()); endLine(); } //Declare an array to store the values/references String tmplist = names.createTempVariable("lst"); out("var ", tmplist, "=[];"); endLine(); //Get an iterator String iter = names.createTempVariable("it"); out("var ", iter, "="); super.visit(that); out(".getIterator();"); endLine(); //Iterate String elem = names.createTempVariable("elem"); out("var ", elem, ";"); endLine(); out("while ((", elem, "=", iter, ".next())!==", clAlias, "getFinished())"); beginBlock(); //Add value or reference to the array out(tmplist, ".push("); if (isMethod) { out("{o:", elem, ", f:", elem, ".", memberAccess(that), "}"); } else { out(elem, ".", memberAccess(that)); } out(");"); endBlockNewLine(); //Gather arguments to pass to the callable //Return the array of values or a Callable with the arguments out("return ", clAlias); if (isMethod) { out("JsCallableList(", tmplist, ");"); } else { out("ArraySequence(", tmplist, ");"); } endBlock(); out("())"); } private void 
generateCallable(QualifiedMemberOrTypeExpression that, String name) { String primaryVar = createRetainedTempVar("opt"); out("(", primaryVar, "="); that.getPrimary().visit(this); out(",", clAlias, "JsCallable(", primaryVar, ",", primaryVar, "!==null?", primaryVar, ".", (name == null) ? memberAccess(that) : name, ":null))"); } /** * Checks if the given node is a MemberOrTypeExpression or QualifiedType which * represents an access to a supertype member and returns the scope of that * member or null. */ private Scope getSuperMemberScope(Node node) { Scope scope = null; if (node instanceof BaseMemberOrTypeExpression) { // Check for "Supertype::member" BaseMemberOrTypeExpression bmte = (BaseMemberOrTypeExpression) node; if (bmte.getSupertypeQualifier() != null) { scope = bmte.getDeclaration().getContainer(); } } else if (node instanceof QualifiedMemberOrTypeExpression) { // Check for "super.member" QualifiedMemberOrTypeExpression qmte = (QualifiedMemberOrTypeExpression) node; if (qmte.getPrimary() instanceof Super) { scope = qmte.getDeclaration().getContainer(); } } else if (node instanceof QualifiedType) { // Check for super.Membertype QualifiedType qtype = (QualifiedType) node; if (qtype.getOuterType() instanceof SuperType) { scope = qtype.getDeclarationModel().getContainer(); } } return scope; } private String memberAccessBase(Node node, String member, boolean qualifyBaseExpr) { StringBuilder sb = new StringBuilder(); if (qualifyBaseExpr && (node instanceof BaseMemberOrTypeExpression)) { BaseMemberOrTypeExpression bmte = (BaseMemberOrTypeExpression) node; String path = qualifiedPath(node, bmte.getDeclaration()); if (path.length() > 0) { sb.append(path); sb.append("."); } } Scope scope = getSuperMemberScope(node); if (prototypeStyle && (scope != null)) { sb.append("getT$all()['"); sb.append(scope.getQualifiedNameString()); sb.append("'].$$.prototype."); } sb.append(member); if (!prototypeStyle && (scope != null)) { sb.append(names.scopeSuffix(scope)); } //When 
compiling the language module we need to modify certain base type names String rval = sb.toString(); if (TypeUtils.isReservedTypename(rval)) { rval = sb.append("$").toString(); } return rval; } /** * Returns a string representing a read access to a member, as represented by * the given expression. If the expression is a QualifiedMemberOrTypeExpression * then the LHS is *not* included. If it is a BaseMemberOrTypeExpression and * qualifyBaseExpr==true then the qualified path is included. */ private String memberAccess(MemberOrTypeExpression expr, boolean qualifyBaseExpr) { Declaration decl = expr.getDeclaration(); if (isNative(decl)) { // direct access to a native element return decl.getName(); } if (accessDirectly(decl)) { // direct access, without getter return memberAccessBase(expr, names.name(decl), qualifyBaseExpr); } // access through getter boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null); return memberAccessBase(expr, names.getter(decl), qualifyBaseExpr) + (protoCall ? ".call(this)" : "()"); } private String memberAccess(MemberOrTypeExpression expr) { return memberAccess(expr, true); } private static interface MemberAccessCallback { public void generateValue(); } /** * Generates a write access to a member, as represented by the given expression. * The given callback is responsible for generating the assigned value. * If the expression is a QualifiedMemberOrTypeExpression then the * LHS is *not* included. If it is a BaseMemberOrTypeExpression and * qualifyBaseExpr==true then the qualified path is included. 
*/ private void generateMemberAccess(MemberOrTypeExpression expr, MemberAccessCallback callback, boolean qualifyBaseExpr) { Declaration decl = expr.getDeclaration(); boolean paren = false; if (isNative(decl)) { // direct access to a native element out(decl.getName(), "="); } else if (accessDirectly(decl)) { // direct access, without setter out(memberAccessBase(expr, names.name(decl), qualifyBaseExpr), "="); } else { // access through setter boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null); out(memberAccessBase(expr, names.setter(decl), qualifyBaseExpr), protoCall ? ".call(this," : "("); paren = true; } callback.generateValue(); if (paren) { out(")"); } } private void generateMemberAccess(MemberOrTypeExpression expr, final String strValue, boolean qualifyBaseExpr) { generateMemberAccess(expr, new MemberAccessCallback() { @Override public void generateValue() { out(strValue); } }, qualifyBaseExpr); } @Override public void visit(BaseTypeExpression that) { if (that.getErrors() != null && !that.getErrors().isEmpty()) return; qualify(that, that.getDeclaration()); out(names.name(that.getDeclaration())); } @Override public void visit(QualifiedTypeExpression that) { if (that.getMemberOperator() instanceof SafeMemberOp) { generateCallable(that, names.name(that.getDeclaration())); } else { super.visit(that); out(".", names.name(that.getDeclaration())); } } @Override public void visit(InvocationExpression that) { if (that.getNamedArgumentList()!=null) { NamedArgumentList argList = that.getNamedArgumentList(); out("("); Map<String, String> argVarNames = invoker.defineNamedArguments(argList); TypeArguments targs = that.getPrimary() instanceof BaseTypeExpression ? 
((BaseTypeExpression)that.getPrimary()).getTypeArguments() : null; that.getPrimary().visit(this); if (that.getPrimary() instanceof Tree.MemberOrTypeExpression) { Tree.MemberOrTypeExpression mte = (Tree.MemberOrTypeExpression) that.getPrimary(); if (mte.getDeclaration() instanceof Functional) { Functional f = (Functional) mte.getDeclaration(); invoker.applyNamedArguments(argList, f, argVarNames, getSuperMemberScope(mte)!=null, targs); } } out(")"); } else { PositionalArgumentList argList = that.getPositionalArgumentList(); that.getPrimary().visit(this); if (prototypeStyle && (getSuperMemberScope(that.getPrimary()) != null)) { out(".call(this"); if (!argList.getPositionalArguments().isEmpty()) { out(","); } } else { out("("); } argList.visit(this); TypeArguments targs = that.getPrimary() instanceof StaticMemberOrTypeExpression ? ((StaticMemberOrTypeExpression)that.getPrimary()).getTypeArguments() : null; if (targs != null && targs.getTypeModels() != null && !targs.getTypeModels().isEmpty()) { if (argList.getPositionalArguments().size() > 0) { out(","); } Declaration bmed = ((StaticMemberOrTypeExpression)that.getPrimary()).getDeclaration(); if (bmed instanceof Functional) { if (((Functional) bmed).getParameterLists().get(0).getParameters().size() > argList.getPositionalArguments().size() // has no comprehension && (argList.getPositionalArguments().isEmpty() || argList.getPositionalArguments().get(argList.getPositionalArguments().size()-1) instanceof Tree.Comprehension == false)) { out("undefined,"); } } if (targs != null && targs.getTypeModels() != null && !targs.getTypeModels().isEmpty()) { TypeUtils.printTypeArguments(that, targs.getTypeModels(), this); } } out(")"); } } @Override public void visit(PositionalArgumentList that) { invoker.generatePositionalArguments(that, that.getPositionalArguments(), false); } // Make sure fromTerm is compatible with toTerm by boxing it when necessary private int boxStart(Term fromTerm) { boolean fromNative = isNative(fromTerm); 
boolean toNative = false; // boxStart always converts toward a Ceylon (boxed) value
ProducedType fromType = fromTerm.getTypeModel();
return boxUnboxStart(fromNative, fromType, toNative);
}

// Make sure fromTerm is compatible with toTerm by boxing or unboxing it when necessary
int boxUnboxStart(Term fromTerm, Term toTerm) {
    boolean fromNative = isNative(fromTerm);
    boolean toNative = isNative(toTerm);
    ProducedType fromType = fromTerm.getTypeModel();
    return boxUnboxStart(fromNative, fromType, toNative);
}

// Make sure fromTerm is compatible with toDecl by boxing or unboxing it when necessary
int boxUnboxStart(Term fromTerm, com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration toDecl) {
    boolean fromNative = isNative(fromTerm);
    boolean toNative = isNative(toDecl);
    ProducedType fromType = fromTerm.getTypeModel();
    return boxUnboxStart(fromNative, fromType, toNative);
}

/* Opens a boxing/unboxing conversion around the value that will be emitted
 * next. Returns a code that MUST be passed to boxUnboxEnd() after the value:
 *   0 = no conversion needed (or native type not recognized): nothing emitted
 *   1 = a boxing prefix ending in "(" was emitted; boxUnboxEnd closes it with ")"
 *   2 = unboxing a Ceylon String/Float: boxUnboxEnd appends ".valueOf()"
 *   3 = unboxing some other type: boxUnboxEnd emits nothing
 */
int boxUnboxStart(boolean fromNative, ProducedType fromType, boolean toNative) {
    if (fromNative != toNative) {
        // Box the value
        String fromTypeName = fromType.getProducedTypeQualifiedName();
        if (fromNative) {
            // conversion from native value to Ceylon value
            if (fromTypeName.equals("ceylon.language::String")) {
                out(clAlias, "String(");
            } else if (fromTypeName.equals("ceylon.language::Integer")) {
                // Integer and Boolean map directly onto JS values, so a
                // plain parenthesis is enough.
                out("(");
            } else if (fromTypeName.equals("ceylon.language::Float")) {
                out(clAlias, "Float(");
            } else if (fromTypeName.equals("ceylon.language::Boolean")) {
                out("(");
            } else if (fromTypeName.equals("ceylon.language::Character")) {
                out(clAlias, "Character(");
            } else {
                return 0;
            }
            return 1;
        } else if ("ceylon.language::String".equals(fromTypeName)
                || "ceylon.language::Float".equals(fromTypeName)) {
            // conversion from Ceylon String or Float to native value
            return 2;
        } else {
            return 3;
        }
    }
    return 0;
}

/** Closes a conversion opened by boxUnboxStart(); see the code table there. */
void boxUnboxEnd(int boxType) {
    switch (boxType) {
    case 1: out(")"); break;
    case 2: out(".valueOf()"); break;
    default: //nothing
    }
}

@Override
public void visit(ObjectArgument that) {
    //Don't even bother with nodes that have errors
    if (that.getErrors() != null &&
!that.getErrors().isEmpty()) return; final Class c = (Class)that.getDeclarationModel().getTypeDeclaration(); out("(function()"); beginBlock(); out("//ObjectArgument ", that.getIdentifier().getText()); location(that); endLine(); out(function, names.name(c), "()"); beginBlock(); instantiateSelf(c); referenceOuter(c); ExtendedType xt = that.getExtendedType(); final ClassBody body = that.getClassBody(); SatisfiedTypes sts = that.getSatisfiedTypes(); final List<Declaration> superDecs = new ArrayList<Declaration>(); if (!prototypeStyle) { new SuperVisitor(superDecs).visit(that.getClassBody()); } callSuperclass(xt, c, that, superDecs); callInterfaces(sts, c, that, superDecs); body.visit(this); returnSelf(c); indentLevel--; endLine(); out("}"); endLine(); typeInitialization(xt, sts, false, c, new PrototypeInitCallback() { @Override public void addToPrototypeCallback() { addToPrototype(c, body.getStatements()); } }); out("return ", names.name(c), "(new ", names.name(c), ".$$);"); endBlock(); out("())"); } @Override public void visit(AttributeArgument that) { out("(function()"); beginBlock(); out("//AttributeArgument ", that.getParameter().getName()); location(that); endLine(); Block block = that.getBlock(); SpecifierExpression specExpr = that.getSpecifierExpression(); if (specExpr != null) { out("return "); specExpr.getExpression().visit(this); out(";"); } else if (block != null) { visitStatements(block.getStatements()); } endBlock(); out("())"); } @Override public void visit(SequencedArgument that) { List<PositionalArgument> positionalArguments = that.getPositionalArguments(); boolean spread = !positionalArguments.isEmpty() && positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument == false; if (!spread) { out("["); } boolean first=true; for (PositionalArgument arg: positionalArguments) { if (!first) out(","); if (arg instanceof Tree.ListedArgument) { ((Tree.ListedArgument) arg).getExpression().visit(this); } else if(arg instanceof 
Tree.SpreadArgument) ((Tree.SpreadArgument) arg).getExpression().visit(this); else // comprehension arg.visit(this); first = false; } if (!spread) { out("]"); } } @Override public void visit(SequenceEnumeration that) { SequencedArgument sarg = that.getSequencedArgument(); if (sarg == null) { out(clAlias, "empty"); } else { List<PositionalArgument> positionalArguments = sarg.getPositionalArguments(); int lim = positionalArguments.size()-1; boolean spread = !positionalArguments.isEmpty() && positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument == false; int count=0; ProducedType chainedType = null; if (lim>0 || !spread) { out("["); } for (PositionalArgument expr : positionalArguments) { if (count==lim && spread) { if (lim > 0) { ProducedType seqType = TypeUtils.findSupertype(types.iterable, that.getTypeModel()); closeSequenceWithReifiedType(that, seqType.getTypeArgumentList()); out(".chain("); chainedType = TypeUtils.findSupertype(types.iterable, expr.getTypeModel()); } count--; } else { if (count > 0) { out(","); } } expr.visit(this); count++; } if (chainedType == null) { if (!spread) { closeSequenceWithReifiedType(that, that.getTypeModel().getTypeArgumentList()); } } else { out(","); TypeUtils.printTypeArguments(that, chainedType.getTypeArgumentList(), this); out(")"); } } } @Override public void visit(Comprehension that) { new ComprehensionGenerator(this, names, directAccess).generateComprehension(that); } @Override public void visit(final SpecifierStatement that) { if (prototypeStyle && (that.getSpecifierExpression() instanceof LazySpecifierExpression) && (that.getScope().getContainer() instanceof TypeDeclaration)) { // A lazy specifier expression in a class/interface. In prototype style // these should go into the prototype, so don't generate them here. 
return; } if (that.getBaseMemberExpression() instanceof BaseMemberExpression) { BaseMemberExpression bme = (BaseMemberExpression) that.getBaseMemberExpression(); Declaration bmeDecl = bme.getDeclaration(); if (that.getSpecifierExpression() instanceof LazySpecifierExpression) { // attr => expr; if (bmeDecl.isMember()) { qualify(that, bmeDecl); } else { out("var "); } out(names.getter(bmeDecl), "=function(){return "); that.getSpecifierExpression().visit(this); out(";};"); directAccess.remove(bmeDecl); } else { // attr = expr; if (bmeDecl instanceof MethodOrValue) { final MethodOrValue moval = (MethodOrValue)bmeDecl; if (moval.isVariable()) { // simple assignment to a variable attribute generateMemberAccess(bme, new MemberAccessCallback() { @Override public void generateValue() { int boxType = boxUnboxStart(that.getSpecifierExpression().getExpression().getTerm(), moval); that.getSpecifierExpression().getExpression().visit(GenerateJsVisitor.this); boxUnboxEnd(boxType); } }, true); out(";"); } else if (moval.isMember()) { // Specifier for a member attribute. This actually defines the // member (e.g. in shortcut refinement syntax the attribute // declaration itself can be omitted), so generate the attribute. generateAttributeGetter(moval, that.getSpecifierExpression(), null); } else { // Specifier for some other attribute, or for a method. 
if (prototypeStyle || (bmeDecl.isMember() && (bmeDecl instanceof Method))) { qualify(that, bmeDecl); } out(names.name(bmeDecl), "="); that.getSpecifierExpression().visit(this); out(";"); } } } } else if ((that.getBaseMemberExpression() instanceof ParameterizedExpression) && (that.getSpecifierExpression() != null)) { ParameterizedExpression paramExpr = (ParameterizedExpression) that.getBaseMemberExpression(); if (paramExpr.getPrimary() instanceof BaseMemberExpression) { // func(params) => expr; BaseMemberExpression bme = (BaseMemberExpression) paramExpr.getPrimary(); Declaration bmeDecl = bme.getDeclaration(); if (bmeDecl.isMember()) { qualify(that, bmeDecl); } else { out("var "); } out(names.name(bmeDecl), "="); singleExprFunction(paramExpr.getParameterLists(), that.getSpecifierExpression().getExpression(), that.getScope()); out(";"); } } } private void addSpecifierToPrototype(final TypeDeclaration outer, final SpecifierStatement specStmt) { if (specStmt.getBaseMemberExpression() instanceof BaseMemberExpression) { BaseMemberExpression bme = (BaseMemberExpression) specStmt.getBaseMemberExpression(); Declaration bmeDecl = bme.getDeclaration(); if (specStmt.getSpecifierExpression() instanceof LazySpecifierExpression) { // attr => expr; out(names.self(outer), ".", names.getter(bmeDecl), "=function()"); beginBlock(); initSelf(specStmt.getScope()); out("return "); specStmt.getSpecifierExpression().visit(this); out(";"); endBlockNewLine(true); } else if (bmeDecl.isMember() && (bmeDecl instanceof Value)) { // attr = expr; out(names.self(outer), ".", names.getter(bmeDecl), "=function(){return this.", names.name(bmeDecl), ";};"); endLine(); } } else if ((specStmt.getBaseMemberExpression() instanceof ParameterizedExpression) && (specStmt.getSpecifierExpression() != null)) { final ParameterizedExpression paramExpr = (ParameterizedExpression) specStmt.getBaseMemberExpression(); if (paramExpr.getPrimary() instanceof BaseMemberExpression) { // func(params) => expr; 
BaseMemberExpression bme = (BaseMemberExpression) paramExpr.getPrimary(); out(names.self(outer), ".", names.name(bme.getDeclaration()), "="); singleExprFunction(paramExpr.getParameterLists(), specStmt.getSpecifierExpression().getExpression(), specStmt.getScope()); out(";"); } } } @Override public void visit(final AssignOp that) { String returnValue = null; MemberOrTypeExpression lhsExpr = null; out("("); if (that.getLeftTerm() instanceof BaseMemberExpression) { BaseMemberExpression bme = (BaseMemberExpression) that.getLeftTerm(); lhsExpr = bme; Declaration bmeDecl = bme.getDeclaration(); boolean simpleSetter = hasSimpleGetterSetter(bmeDecl); if (!simpleSetter) { returnValue = memberAccess(bme); } } else if (that.getLeftTerm() instanceof QualifiedMemberExpression) { QualifiedMemberExpression qme = (QualifiedMemberExpression)that.getLeftTerm(); lhsExpr = qme; boolean simpleSetter = hasSimpleGetterSetter(qme.getDeclaration()); String lhsVar = null; if (!simpleSetter) { lhsVar = createRetainedTempVar(); out(lhsVar, "="); super.visit(qme); out(",", lhsVar, "."); returnValue = lhsVar + "." + memberAccess(qme); } else { super.visit(qme); out("."); } } generateMemberAccess(lhsExpr, new MemberAccessCallback() { @Override public void generateValue() { int boxType = boxUnboxStart(that.getRightTerm(), that.getLeftTerm()); that.getRightTerm().visit(GenerateJsVisitor.this); boxUnboxEnd(boxType); } }, true); if (returnValue != null) { out(",", returnValue); } out(")"); } /** Outputs the module name for the specified declaration. Returns true if something was output. 
*/ boolean qualify(Node that, Declaration d) { if (d.getUnit().getPackage().getModule().isDefault()) { return false; } String path = qualifiedPath(that, d); if (path.length() > 0) { out(path, "."); } return path.length() > 0; } private String qualifiedPath(Node that, Declaration d) { return qualifiedPath(that, d, false); } private String qualifiedPath(Node that, Declaration d, boolean inProto) { boolean isMember = d.isClassOrInterfaceMember(); if (!isMember && isImported(that, d)) { return names.moduleAlias(d.getUnit().getPackage().getModule()); } else if (prototypeStyle && !inProto) { if (isMember && !(d instanceof com.redhat.ceylon.compiler.typechecker.model.Parameter && !d.isCaptured())) { TypeDeclaration id = that.getScope().getInheritingDeclaration(d); if (id == null) { //a local declaration of some kind, //perhaps in an outer scope id = (TypeDeclaration) d.getContainer(); } //else { //an inherited declaration that might be //inherited by an outer scope //} String path = ""; Scope scope = that.getScope(); // if (inProto) { // while ((scope != null) && (scope instanceof TypeDeclaration)) { // scope = scope.getContainer(); // } // } if ((scope != null) && ((that instanceof ClassDeclaration) || (that instanceof InterfaceDeclaration))) { // class/interface aliases have no own "this" scope = scope.getContainer(); } while (scope != null) { if (scope instanceof TypeDeclaration) { if (path.length() > 0) { path += '.'; } path += names.self((TypeDeclaration) scope); } else { path = ""; } if (scope == id) { break; } scope = scope.getContainer(); } return path; } } else if (d != null && (d.isShared() || inProto) && isMember) { TypeDeclaration id = that.getScope().getInheritingDeclaration(d); if (id==null) { //a shared local declaration return names.self((TypeDeclaration)d.getContainer()); } else { //an inherited declaration that might be //inherited by an outer scope return names.self(id); } } return ""; } /** Tells whether a declaration is in the same package as a node. 
*/ private boolean isImported(Node that, Declaration d) {
    // NOTE(review): despite what the javadoc above says, this returns true
    // when the declaration lives in a *different* package than the node,
    // i.e. when it is imported from elsewhere (hence the method name).
    if (d == null) {
        return false;
    }
    Package p1 = d.getUnit().getPackage();
    Package p2 = that == null ? null : that.getUnit().getPackage();
    return !p1.equals(p2);
}

@Override
public void visit(ExecutableStatement that) {
    super.visit(that);
    // Every executable statement ends its own ';'-terminated JS line.
    endLine(true);
}

/** Creates a new temporary variable which can be used immediately, even
 * inside an expression. The declaration for that temporary variable will be
 * emitted after the current Ceylon statement has been completely processed.
 * The resulting code is valid because JavaScript variables may be used before
 * they are declared. */
private String createRetainedTempVar(String baseName) {
    String varName = names.createTempVariable(baseName);
    // Remember the name so its "var" declaration is emitted later.
    retainedVars.add(varName);
    return varName;
}

private String createRetainedTempVar() {
    return createRetainedTempVar("tmp");
}

//    @Override
//    public void visit(Expression that) {
//        if (that.getTerm() instanceof QualifiedMemberOrTypeExpression) {
//            QualifiedMemberOrTypeExpression term = (QualifiedMemberOrTypeExpression) that.getTerm();
//            // References to methods of types from other packages always need
//            // special treatment, even if prototypeStyle==false, because they
//            // may have been generated in prototype style. In particular,
//            // ceylon.language is always in prototype style.
//            if ((term.getDeclaration() instanceof Functional)
//                    && (prototypeStyle || !declaredInThisPackage(term.getDeclaration()))) {
//                if (term.getMemberOperator() instanceof SpreadOp) {
//                    generateSpread(term);
//                } else {
//                    generateCallable(term, names.name(term.getDeclaration()));
//                }
//                return;
//            }
//        }
//        super.visit(that);
//    }

@Override
public void visit(Return that) {
    out("return ");
    super.visit(that);
}

// Annotation lists produce no JS output.
@Override
public void visit(AnnotationList that) {}

/** Outputs the self variable of the given type. */
void self(TypeDeclaration d) {
    out(names.self(d));
}

/*
 * Output the name of a variable that receives the type parameter info, usually in the class constructor.
* / private void selfTypeParameters(TypeDeclaration d) { out(selfTypeParametersString(d)); }
private String selfTypeParametersString(TypeDeclaration d) { return "$$typeParms" + d.getName(); }*/
/*private void self() { out("$$"); }*/

/** Outputs the receiver for a member access: "exports" for a toplevel
 * declaration, or the self variable of the containing type for a
 * class/interface member. Returns true if anything was output. */
private boolean outerSelf(Declaration d) {
    if (d.isToplevel()) {
        out("exports");
        return true;
    } else if (d.isClassOrInterfaceMember()) {
        self((TypeDeclaration)d.getContainer());
        return true;
    }
    return false;
}

/** Tells whether a declaration comes from the ceylon.language package. */
private boolean declaredInCL(Declaration decl) {
    return decl.getUnit().getPackage().getQualifiedNameString()
            .startsWith("ceylon.language");
}

// Arithmetic operators are mapped onto the corresponding methods of the
// Ceylon numeric types: x + y -> x.plus(y), x - y -> x.minus(y), etc.
@Override
public void visit(SumOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left(); out(".plus("); termgen.right(); out(")");
        }
    });
}

@Override
public void visit(DifferenceOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left(); out(".minus("); termgen.right(); out(")");
        }
    });
}

@Override
public void visit(ProductOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left(); out(".times("); termgen.right(); out(")");
        }
    });
}

@Override
public void visit(QuotientOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left(); out(".divided("); termgen.right(); out(")");
        }
    });
}

@Override
public void visit(RemainderOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left(); out(".remainder("); termgen.right(); out(")");
        }
    });
}

@Override
public void visit(PowerOp that) {
    binaryOp(that, new BinaryOpGenerator() {
        @Override
        public void generate(BinaryOpTermGenerator termgen) {
            termgen.left(); out(".power("); termgen.right(); out(")");
        }
    });
}

// Compound assignments delegate to arithmeticAssignOp: x op= y -> x = x.method(y).
@Override
public void visit(AddAssignOp that) {
    arithmeticAssignOp(that, "plus");
}

@Override public
void visit(SubtractAssignOp that) { arithmeticAssignOp(that, "minus"); } @Override public void visit(MultiplyAssignOp that) { arithmeticAssignOp(that, "times"); } @Override public void visit(DivideAssignOp that) { arithmeticAssignOp(that, "divided"); } @Override public void visit(RemainderAssignOp that) { arithmeticAssignOp(that, "remainder"); } private void arithmeticAssignOp(final ArithmeticAssignmentOp that, final String functionName) { Term lhs = that.getLeftTerm(); if (lhs instanceof BaseMemberExpression) { BaseMemberExpression lhsBME = (BaseMemberExpression) lhs; Declaration lhsDecl = lhsBME.getDeclaration(); final String getLHS = memberAccess(lhsBME); out("("); generateMemberAccess(lhsBME, new MemberAccessCallback() { @Override public void generateValue() { out(getLHS, ".", functionName, "("); that.getRightTerm().visit(GenerateJsVisitor.this); out(")"); } }, true); if (!hasSimpleGetterSetter(lhsDecl)) { out(",", getLHS); } out(")"); } else if (lhs instanceof QualifiedMemberExpression) { QualifiedMemberExpression lhsQME = (QualifiedMemberExpression) lhs; if (isNative(lhsQME)) { // ($1.foo = Box($1.foo).operator($2)) out("("); lhsQME.getPrimary().visit(this); out(".", lhsQME.getDeclaration().getName()); out("="); int boxType = boxStart(lhsQME); lhsQME.getPrimary().visit(this); out(".", lhsQME.getDeclaration().getName()); boxUnboxEnd(boxType); out(".", functionName, "("); that.getRightTerm().visit(this); out("))"); } else { final String lhsPrimaryVar = createRetainedTempVar(); final String getLHS = lhsPrimaryVar + "." 
+ memberAccess(lhsQME); out("(", lhsPrimaryVar, "="); lhsQME.getPrimary().visit(this); out(",", lhsPrimaryVar, "."); generateMemberAccess(lhsQME, new MemberAccessCallback() { @Override public void generateValue() { out(getLHS, ".", functionName, "("); that.getRightTerm().visit(GenerateJsVisitor.this); out(")"); } }, false); if (!hasSimpleGetterSetter(lhsQME.getDeclaration())) { out(",", getLHS); } out(")"); } } } @Override public void visit(final NegativeOp that) { unaryOp(that, new UnaryOpGenerator() { @Override public void generate(UnaryOpTermGenerator termgen) { TypeDeclaration d = that.getTerm().getTypeModel().getDeclaration(); if (d.inherits(types._integer)) { out("(-"); termgen.term(); out(")"); //This is not really optimal yet, since it generates //stuff like Float(-Float((5.1))) /*} else if (d.inherits(types._float)) { out(clAlias, "Float(-"); termgen.term(); out(")");*/ } else { termgen.term(); out(".getNegativeValue()"); } } }); } @Override public void visit(final PositiveOp that) { unaryOp(that, new UnaryOpGenerator() { @Override public void generate(UnaryOpTermGenerator termgen) { TypeDeclaration d = that.getTerm().getTypeModel().getDeclaration(); if (d.inherits(types._integer) || d.inherits(types._float)) { out("(+"); termgen.term(); out(")"); } else { termgen.term(); out(".getPositiveValue()"); } } }); } @Override public void visit(EqualOp that) { leftEqualsRight(that); } @Override public void visit(NotEqualOp that) { out("(!"); leftEqualsRight(that); out(")"); } @Override public void visit(NotOp that) { unaryOp(that, new UnaryOpGenerator() { @Override public void generate(UnaryOpTermGenerator termgen) { out("(!"); termgen.term(); out(")"); } }); } @Override public void visit(IdenticalOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out("("); termgen.left(); out("==="); termgen.right(); out(")"); } }); } @Override public void visit(CompareOp that) { leftCompareRight(that); } @Override 
public void visit(SmallerOp that) { leftCompareRight(that); out(".equals(", clAlias, "getSmaller())"); } @Override public void visit(LargerOp that) { leftCompareRight(that); out(".equals(", clAlias, "getLarger())"); } @Override public void visit(SmallAsOp that) { out("("); leftCompareRight(that); out("!==", clAlias, "getLarger()"); out(")"); } @Override public void visit(LargeAsOp that) { out("("); leftCompareRight(that); out("!==", clAlias, "getSmaller()"); out(")"); } /** Outputs the CL equivalent of 'a==b' in JS. */ private void leftEqualsRight(BinaryOperatorExpression that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".equals("); termgen.right(); out(")"); } }); } interface UnaryOpTermGenerator { void term(); } interface UnaryOpGenerator { void generate(UnaryOpTermGenerator termgen); } private void unaryOp(final UnaryOperatorExpression that, final UnaryOpGenerator gen) { final GenerateJsVisitor visitor = this; gen.generate(new UnaryOpTermGenerator() { @Override public void term() { int boxTypeLeft = boxStart(that.getTerm()); that.getTerm().visit(visitor); boxUnboxEnd(boxTypeLeft); } }); } interface BinaryOpTermGenerator { void left(); void right(); } interface BinaryOpGenerator { void generate(BinaryOpTermGenerator termgen); } private void binaryOp(final BinaryOperatorExpression that, final BinaryOpGenerator gen) { final GenerateJsVisitor visitor = this; gen.generate(new BinaryOpTermGenerator() { @Override public void left() { int boxTypeLeft = boxStart(that.getLeftTerm()); that.getLeftTerm().visit(visitor); boxUnboxEnd(boxTypeLeft); } @Override public void right() { int boxTypeRight = boxStart(that.getRightTerm()); that.getRightTerm().visit(visitor); boxUnboxEnd(boxTypeRight); } }); } /** Outputs the CL equivalent of 'a <=> b' in JS. 
*/ private void leftCompareRight(BinaryOperatorExpression that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".compare("); termgen.right(); out(")"); } }); } @Override public void visit(AndOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out("("); termgen.left(); out("&&"); termgen.right(); out(")"); } }); } @Override public void visit(OrOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out("("); termgen.left(); out("||"); termgen.right(); out(")"); } }); } @Override public void visit(final EntryOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out(clAlias, "Entry("); termgen.left(); out(","); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(Element that) { out(".item("); that.getExpression().visit(this); out(")"); } @Override public void visit(DefaultOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { String lhsVar = createRetainedTempVar("opt"); out("(", lhsVar, "="); termgen.left(); out(",", lhsVar, "!==null?", lhsVar, ":"); termgen.right(); out(")"); } }); } @Override public void visit(ThenOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out("("); termgen.left(); out("?"); termgen.right(); out(":null)"); } }); } @Override public void visit(IncrementOp that) { prefixIncrementOrDecrement(that.getTerm(), "getSuccessor"); } @Override public void visit(DecrementOp that) { prefixIncrementOrDecrement(that.getTerm(), "getPredecessor"); } private boolean hasSimpleGetterSetter(Declaration decl) { return !((decl instanceof Getter) || (decl 
instanceof Setter) || decl.isFormal()); } private void prefixIncrementOrDecrement(Term term, String functionName) { if (term instanceof BaseMemberExpression) { BaseMemberExpression bme = (BaseMemberExpression) term; boolean simpleSetter = hasSimpleGetterSetter(bme.getDeclaration()); String getMember = memberAccess(bme); String applyFunc = String.format("%s.%s()", getMember, functionName); out("("); generateMemberAccess(bme, applyFunc, true); if (!simpleSetter) { out(",", getMember); } out(")"); } else if (term instanceof QualifiedMemberExpression) { QualifiedMemberExpression qme = (QualifiedMemberExpression) term; String primaryVar = createRetainedTempVar(); String getMember = primaryVar + "." + memberAccess(qme); String applyFunc = String.format("%s.%s()", getMember, functionName); out("(", primaryVar, "="); qme.getPrimary().visit(this); out(",", primaryVar, "."); generateMemberAccess(qme, applyFunc, false); if (!hasSimpleGetterSetter(qme.getDeclaration())) { out(",", getMember); } out(")"); } } @Override public void visit(PostfixIncrementOp that) { postfixIncrementOrDecrement(that.getTerm(), "getSuccessor"); } @Override public void visit(PostfixDecrementOp that) { postfixIncrementOrDecrement(that.getTerm(), "getPredecessor"); } private void postfixIncrementOrDecrement(Term term, String functionName) { if (term instanceof BaseMemberExpression) { BaseMemberExpression bme = (BaseMemberExpression) term; String oldValueVar = createRetainedTempVar("old" + bme.getDeclaration().getName()); String applyFunc = String.format("%s.%s()", oldValueVar, functionName); out("(", oldValueVar, "=", memberAccess(bme), ","); generateMemberAccess(bme, applyFunc, true); out(",", oldValueVar, ")"); } else if (term instanceof QualifiedMemberExpression) { QualifiedMemberExpression qme = (QualifiedMemberExpression) term; String primaryVar = createRetainedTempVar(); String oldValueVar = createRetainedTempVar("old" + qme.getDeclaration().getName()); String applyFunc = String.format("%s.%s()", 
oldValueVar, functionName); out("(", primaryVar, "="); qme.getPrimary().visit(this); out(",", oldValueVar, "=", primaryVar, ".", memberAccess(qme), ",", primaryVar, "."); generateMemberAccess(qme, applyFunc, false); out(",", oldValueVar, ")"); } } @Override public void visit(final UnionOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".union("); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(final IntersectionOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".intersection("); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(final XorOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".exclusiveUnion("); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(final ComplementOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.left(); out(".complement("); termgen.right(); out(","); TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArgumentList(), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(Exists that) { unaryOp(that, new UnaryOpGenerator() { @Override public void generate(UnaryOpTermGenerator termgen) { out(clAlias, "exists("); termgen.term(); out(")"); } }); } @Override public void visit(Nonempty that) { unaryOp(that, new UnaryOpGenerator() { @Override public void 
generate(UnaryOpTermGenerator termgen) { out(clAlias, "nonempty("); termgen.term(); out(")"); } }); } //Don't know if we'll ever see this... @Override public void visit(ConditionList that) { System.out.println("ZOMG condition list in the wild! " + that.getLocation() + " of " + that.getUnit().getFilename()); super.visit(that); } @Override public void visit(BooleanCondition that) { int boxType = boxStart(that.getExpression().getTerm()); super.visit(that); boxUnboxEnd(boxType); } @Override public void visit(IfStatement that) { conds.generateIf(that); } @Override public void visit(WhileStatement that) { conds.generateWhile(that); } /** Generates js code to check if a term is of a certain type. We solve this in JS by * checking against all types that Type satisfies (in the case of union types, matching any * type will do, and in case of intersection types, all types must be matched). * @param term The term that is to be checked against a type * @param termString (optional) a string to be used as the term to be checked * @param type The type to check against * @param tmpvar (optional) a variable to which the term is assigned * @param negate If true, negates the generated condition */ void generateIsOfType(Term term, String termString, Type type, String tmpvar, final boolean negate) { if (negate) { out("!"); } out(clAlias, "isOfType("); if (term != null) { conds.specialConditionRHS(term, tmpvar); } else { conds.specialConditionRHS(termString, tmpvar); } out(","); TypeUtils.typeNameOrList(term, type.getTypeModel(), this, true); out(")"); } @Override public void visit(IsOp that) { generateIsOfType(that.getTerm(), null, that.getType(), null, false); } @Override public void visit(Break that) { if (continues.isEmpty()) { out("break;"); } else { Continuation top=continues.peek(); if (that.getScope()==top.getScope()) { top.useBreak(); out(top.getBreakName(), "=true; return;"); } else { out("break;"); } } } @Override public void visit(Continue that) { if (continues.isEmpty()) { 
out("continue;"); } else { Continuation top=continues.peek(); if (that.getScope()==top.getScope()) { top.useContinue(); out(top.getContinueName(), "=true; return;"); } else { out("continue;"); } } } @Override public void visit(final RangeOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { out(clAlias, "Range("); termgen.left(); out(","); termgen.right(); out(","); TypeUtils.printTypeArguments(that, Collections.singletonList(that.getLeftTerm().getTypeModel()), GenerateJsVisitor.this); out(")"); } }); } @Override public void visit(ForStatement that) { if (comment) { out("//'for' statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")"); if (that.getExits()) out("//EXITS!"); endLine(); } ForIterator foriter = that.getForClause().getForIterator(); final String itemVar = generateForLoop(foriter); boolean hasElse = that.getElseClause() != null && !that.getElseClause().getBlock().getStatements().isEmpty(); visitStatements(that.getForClause().getBlock().getStatements()); //If there's an else block, check for normal termination endBlock(); if (hasElse) { endLine(); out("if (", clAlias, "getFinished() === ", itemVar, ")"); encloseBlockInFunction(that.getElseClause().getBlock()); } } /** Generates code for the beginning of a "for" loop, returning the name of the variable used for the item. 
*/ private String generateForLoop(ForIterator that) { SpecifierExpression iterable = that.getSpecifierExpression(); final String iterVar = names.createTempVariable("it"); final String itemVar; if (that instanceof ValueIterator) { itemVar = names.name(((ValueIterator)that).getVariable().getDeclarationModel()); } else { itemVar = names.createTempVariable("item"); } out("var ", iterVar, " = "); iterable.visit(this); out(".getIterator();"); endLine(); out("var ", itemVar, ";while ((", itemVar, "=", iterVar, ".next())!==", clAlias, "getFinished())"); beginBlock(); if (that instanceof ValueIterator) { directAccess.add(((ValueIterator)that).getVariable().getDeclarationModel()); } else if (that instanceof KeyValueIterator) { String keyvar = names.name(((KeyValueIterator)that).getKeyVariable().getDeclarationModel()); String valvar = names.name(((KeyValueIterator)that).getValueVariable().getDeclarationModel()); out("var ", keyvar, "=", itemVar, ".getKey();"); endLine(); out("var ", valvar, "=", itemVar, ".getItem();"); directAccess.add(((KeyValueIterator)that).getKeyVariable().getDeclarationModel()); directAccess.add(((KeyValueIterator)that).getValueVariable().getDeclarationModel()); endLine(); } return itemVar; } public void visit(InOp that) { binaryOp(that, new BinaryOpGenerator() { @Override public void generate(BinaryOpTermGenerator termgen) { termgen.right(); out(".contains("); termgen.left(); out(")"); } }); } @Override public void visit(TryCatchStatement that) { out("try"); encloseBlockInFunction(that.getTryClause().getBlock()); if (!that.getCatchClauses().isEmpty()) { String catchVarName = names.createTempVariable("ex"); out("catch(", catchVarName, ")"); beginBlock(); boolean firstCatch = true; for (CatchClause catchClause : that.getCatchClauses()) { Variable variable = catchClause.getCatchVariable().getVariable(); if (!firstCatch) { out("else "); } firstCatch = false; out("if("); generateIsOfType(null, catchVarName, variable.getType(), null, false); out(")"); if 
(catchClause.getBlock().getStatements().isEmpty()) { out("{}"); } else { beginBlock(); directAccess.add(variable.getDeclarationModel()); names.forceName(variable.getDeclarationModel(), catchVarName); visitStatements(catchClause.getBlock().getStatements()); endBlockNewLine(); } } out("else{throw ", catchVarName, "}"); endBlockNewLine(); } if (that.getFinallyClause() != null) { out("finally"); encloseBlockInFunction(that.getFinallyClause().getBlock()); } } @Override public void visit(Throw that) { out("throw "); if (that.getExpression() != null) { that.getExpression().visit(this); } else { out(clAlias, "Exception()"); } out(";"); } private void visitIndex(IndexExpression that) { that.getPrimary().visit(this); ElementOrRange eor = that.getElementOrRange(); if (eor instanceof Element) { out(".item("); ((Element)eor).getExpression().visit(this); out(")"); } else {//range, or spread? ElementRange er = (ElementRange)eor; Expression sexpr = er.getLength(); if (sexpr == null) { if (er.getLowerBound() == null) { out(".spanTo("); } else if (er.getUpperBound() == null) { out(".spanFrom("); } else { out(".span("); } } else { out(".segment("); } if (er.getLowerBound() != null) { er.getLowerBound().visit(this); if (er.getUpperBound() != null || sexpr != null) { out(","); } } if (er.getUpperBound() != null) { er.getUpperBound().visit(this); } else if (sexpr != null) { sexpr.visit(this); } out(")"); } } public void visit(IndexExpression that) { IndexOperator op = that.getIndexOperator(); if (op instanceof SafeIndexOp) { out(clAlias, "exists("); that.getPrimary().visit(this); out(")?"); } visitIndex(that); if (op instanceof SafeIndexOp) { out(":null"); } } /** Generates code for a case clause, as part of a switch statement. Each case * is rendered as an if. 
*/ private void caseClause(CaseClause cc, String expvar, Term switchTerm) { out("if ("); final CaseItem item = cc.getCaseItem(); if (item instanceof IsCase) { IsCase isCaseItem = (IsCase) item; generateIsOfType(null, expvar, isCaseItem.getType(), null, false); Variable caseVar = isCaseItem.getVariable(); if (caseVar != null) { directAccess.add(caseVar.getDeclarationModel()); names.forceName(caseVar.getDeclarationModel(), expvar); } } else if (item instanceof SatisfiesCase) { item.addError("case(satisfies) not yet supported"); out("true"); } else if (item instanceof MatchCase){ boolean first = true; for (Expression exp : ((MatchCase)item).getExpressionList().getExpressions()) { if (!first) out(" || "); out(expvar, "==="); //TODO equality? /*out(".equals(");*/ exp.visit(this); //out(")==="); clAlias(); out("getTrue()"); first = false; } } else { cc.addUnexpectedError("support for case of type " + cc.getClass().getSimpleName() + " not yet implemented"); } out(") "); encloseBlockInFunction(cc.getBlock()); } @Override public void visit(SwitchStatement that) { if (comment) out("//Switch statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")"); endLine(); //Put the expression in a tmp var final String expvar = names.createTempVariable("switch"); out("var ", expvar, "="); Expression expr = that.getSwitchClause().getExpression(); expr.visit(this); endLine(true); //For each case, do an if boolean first = true; for (CaseClause cc : that.getSwitchCaseList().getCaseClauses()) { if (!first) out("else "); caseClause(cc, expvar, expr.getTerm()); first = false; } if (that.getSwitchCaseList().getElseClause() != null) { out("else "); that.getSwitchCaseList().getElseClause().visit(this); } if (comment) { out("//End switch statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")"); endLine(); } } /** Generates the code for an anonymous function defined inside an argument list. 
*/ @Override public void visit(final FunctionArgument that) { singleExprFunction(that.getParameterLists(), that.getExpression(), that.getScope()); } private void singleExprFunction(final List<ParameterList> paramLists, final Expression expr, final Scope scope) { generateParameterLists(paramLists, scope, new ParameterListCallback() { @Override public void completeFunction() { beginBlock(); if (paramLists.size() == 1) { initSelf(scope); } initParameters(paramLists.get(paramLists.size()-1), null); out("return "); expr.visit(GenerateJsVisitor.this); out(";"); endBlock(); } }); } /** Generates the code for a function in a named argument list. */ @Override public void visit(final MethodArgument that) { generateParameterLists(that.getParameterLists(), that.getScope(), new ParameterListCallback() { @Override public void completeFunction() { Block block = that.getBlock(); SpecifierExpression specExpr = that.getSpecifierExpression(); if (specExpr != null) { out("{return "); specExpr.getExpression().visit(GenerateJsVisitor.this); out(";}"); } else if (block != null) { block.visit(GenerateJsVisitor.this); } } }); } @Override public void visit(SegmentOp that) { String rhs = names.createTempVariable(); out("(function(){var ", rhs, "="); that.getRightTerm().visit(this); endLine(true); out("if (", rhs, ">0){"); endLine(); String lhs = names.createTempVariable(); String end = names.createTempVariable(); out("var ", lhs, "="); that.getLeftTerm().visit(this); endLine(true); out("var ", end, "=", lhs); endLine(true); out("for (var i=1; i<", rhs, "; i++){", end, "=", end, ".getSuccessor();}"); endLine(); out("return ", clAlias, "Range("); out(lhs, ",", end, ")"); endLine(); out("}else return ", clAlias, "empty;}())"); } /** Generates the code for single or multiple parameter lists, with a callback function to generate the function blocks. 
*/ private void generateParameterLists(List<ParameterList> plist, Scope scope, ParameterListCallback callback) { if (plist.size() == 1) { out(function); ParameterList paramList = plist.get(0); paramList.visit(this); callback.completeFunction(); } else { int count=0; for (ParameterList paramList : plist) { if (count==0) { out(function); } else { out("return function"); } paramList.visit(this); if (count == 0) { beginBlock(); initSelf(scope); initParameters(paramList, null); } else { out("{"); } count++; } callback.completeFunction(); for (int i=0; i < count; i++) { endBlock(false, i==count-1); } } } /** Encloses the block in a function, IF NEEDED. */ void encloseBlockInFunction(Block block) { boolean wrap=encloser.encloseBlock(block); if (wrap) { beginBlock(); Continuation c = new Continuation(block.getScope(), names); continues.push(c); out("var ", c.getContinueName(), "=false;"); endLine(); out("var ", c.getBreakName(), "=false;"); endLine(); out("var ", c.getReturnName(), "=(function()"); } block.visit(this); if (wrap) { Continuation c = continues.pop(); out("());if(", c.getReturnName(), "!==undefined){return ", c.getReturnName(), ";}"); if (c.isContinued()) { out("else if(", c.getContinueName(),"===true){continue;}"); } if (c.isBreaked()) { out("else if (", c.getBreakName(),"===true){break;}"); } endBlockNewLine(); } } private static class Continuation { private final String cvar; private final String rvar; private final String bvar; private final Scope scope; private boolean cused, bused; public Continuation(Scope scope, JsIdentifierNames names) { this.scope=scope; cvar = names.createTempVariable("cntvar"); rvar = names.createTempVariable("retvar"); bvar = names.createTempVariable("brkvar"); } public Scope getScope() { return scope; } public String getContinueName() { return cvar; } public String getBreakName() { return bvar; } public String getReturnName() { return rvar; } public void useContinue() { cused = true; } public void useBreak() { bused=true; } 
public boolean isContinued() { return cused; } public boolean isBreaked() { return bused; } //"isBroken" sounds really really bad in this case } private static interface ParameterListCallback { void completeFunction(); } /** This interface is used inside type initialization method. */ private interface PrototypeInitCallback { void addToPrototypeCallback(); } @Override public void visit(Tuple that) { int count = 0; SequencedArgument sarg = that.getSequencedArgument(); if (sarg == null) { out(clAlias, "empty"); } else { List<List<ProducedType>> targs = new ArrayList<List<ProducedType>>(); List<PositionalArgument> positionalArguments = sarg.getPositionalArguments(); boolean spread = !positionalArguments.isEmpty() && positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument == false; int lim = positionalArguments.size()-1; for (PositionalArgument expr : positionalArguments) { if (count > 0) { out(","); } if (count==lim && spread) { if (expr.getTypeModel().getDeclaration().inherits(types.tuple)) { expr.visit(this); } else { expr.visit(this); out(".getSequence()"); } } else { out(clAlias, "Tuple("); if (count > 0) { targs.add(0, targs.get(0).get(2).getTypeArgumentList()); } else { targs.add(that.getTypeModel().getTypeArgumentList()); } expr.visit(this); } count++; } if (!spread) { if (count > 0) { out(","); } out(clAlias, "empty"); } else { count--; } for (List<ProducedType> t : targs) { out(","); TypeUtils.printTypeArguments(that, t, this); out(")"); } } } @Override public void visit(Assertion that) { out("//assert"); location(that); String custom = "Assertion failed"; //Scan for a "doc" annotation with custom message for (Annotation ann : that.getAnnotationList().getAnnotations()) { BaseMemberExpression bme = (BaseMemberExpression)ann.getPrimary(); if ("doc".equals(bme.getDeclaration().getName())) { custom = ((Tree.ListedArgument)ann.getPositionalArgumentList().getPositionalArguments().get(0)).getExpression().getTerm().getText(); //unquote 
custom = custom.substring(1, custom.length() - 1); } } endLine(); StringBuilder sb = new StringBuilder(custom).append(": '"); for (int i = that.getConditionList().getToken().getTokenIndex()+1; i < that.getConditionList().getEndToken().getTokenIndex(); i++) { sb.append(tokens.get(i).getText()); } sb.append("' at ").append(that.getUnit().getFilename()).append(" (").append( that.getConditionList().getLocation()).append(")"); conds.specialConditionsAndBlock(that.getConditionList(), null, "if (!"); //escape custom = escapeStringLiteral(sb.toString()); out(") { throw ", clAlias, "Exception('", custom, "'); }"); endLine(); } void closeSequenceWithReifiedType(Node that, List<ProducedType> types) { out("].reifyCeylonType("); TypeUtils.printTypeArguments(that, types, this); out(")"); } }
Fixed code because Tree.SafeIndexOp was removed from language model
src/main/java/com/redhat/ceylon/compiler/js/GenerateJsVisitor.java
Fixed code because Tree.SafeIndexOp was removed from language model
<ide><path>rc/main/java/com/redhat/ceylon/compiler/js/GenerateJsVisitor.java <ide> } <ide> <ide> public void visit(IndexExpression that) { <del> IndexOperator op = that.getIndexOperator(); <del> if (op instanceof SafeIndexOp) { <del> out(clAlias, "exists("); <del> that.getPrimary().visit(this); <del> out(")?"); <del> } <ide> visitIndex(that); <del> if (op instanceof SafeIndexOp) { <del> out(":null"); <del> } <ide> } <ide> <ide> /** Generates code for a case clause, as part of a switch statement. Each case
Java
epl-1.0
9cac3bfda5bab4762a37b2b848ab50a90c40a910
0
rrimmana/birt-1,sguan-actuate/birt,sguan-actuate/birt,Charling-Huang/birt,rrimmana/birt-1,rrimmana/birt-1,rrimmana/birt-1,Charling-Huang/birt,Charling-Huang/birt,Charling-Huang/birt,sguan-actuate/birt,rrimmana/birt-1,Charling-Huang/birt,sguan-actuate/birt,sguan-actuate/birt
/************************************************************************************* * Copyright (c) 2004 Actuate Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - Initial implementation. ************************************************************************************/ package org.eclipse.birt.report.designer.internal.ui.palette; import java.util.ArrayList; import java.util.List; import org.eclipse.birt.report.designer.core.IReportElementConstants; import org.eclipse.birt.report.designer.core.model.SessionHandleAdapter; import org.eclipse.birt.report.designer.core.model.schematic.ListBandProxy; import org.eclipse.birt.report.designer.core.model.views.data.DataSetItemModel; import org.eclipse.birt.report.designer.internal.ui.dialogs.TableOptionDialog; import org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil; import org.eclipse.birt.report.designer.internal.ui.editors.schematic.extensions.ExtendedElementToolExtends; import org.eclipse.birt.report.designer.internal.ui.editors.schematic.tools.AbstractToolHandleExtends; import org.eclipse.birt.report.designer.internal.ui.util.ExceptionHandler; import org.eclipse.birt.report.designer.internal.ui.util.UIUtil; import org.eclipse.birt.report.designer.internal.ui.views.dialogs.provider.DataSetColumnBindingsFormHandleProvider; import org.eclipse.birt.report.designer.nls.Messages; import org.eclipse.birt.report.designer.ui.ReportPlugin; import org.eclipse.birt.report.designer.ui.dialogs.SelectVariableDialog; import org.eclipse.birt.report.designer.ui.newelement.DesignElementFactory; import org.eclipse.birt.report.model.api.AutoTextHandle; import org.eclipse.birt.report.model.api.CellHandle; import org.eclipse.birt.report.model.api.DataItemHandle; 
import org.eclipse.birt.report.model.api.DataSetHandle; import org.eclipse.birt.report.model.api.DesignElementHandle; import org.eclipse.birt.report.model.api.GridHandle; import org.eclipse.birt.report.model.api.ImageHandle; import org.eclipse.birt.report.model.api.LabelHandle; import org.eclipse.birt.report.model.api.LibraryHandle; import org.eclipse.birt.report.model.api.ListHandle; import org.eclipse.birt.report.model.api.ModuleHandle; import org.eclipse.birt.report.model.api.ReportDesignHandle; import org.eclipse.birt.report.model.api.ReportItemHandle; import org.eclipse.birt.report.model.api.ResultSetColumnHandle; import org.eclipse.birt.report.model.api.RowHandle; import org.eclipse.birt.report.model.api.ScalarParameterHandle; import org.eclipse.birt.report.model.api.TableHandle; import org.eclipse.birt.report.model.api.TextDataHandle; import org.eclipse.birt.report.model.api.TextItemHandle; import org.eclipse.birt.report.model.api.activity.SemanticException; import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants; import org.eclipse.birt.report.model.api.olap.DimensionHandle; import org.eclipse.birt.report.model.api.olap.MeasureHandle; import org.eclipse.gef.palette.MarqueeToolEntry; import org.eclipse.gef.palette.PaletteContainer; import org.eclipse.gef.palette.PaletteGroup; import org.eclipse.gef.palette.PaletteRoot; import org.eclipse.gef.palette.PanningSelectionToolEntry; import org.eclipse.gef.palette.ToolEntry; import org.eclipse.gef.requests.CreateRequest; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.jface.window.Window; /** * is the base class of Palette factory. Creates default tools here. 
*/ public class BasePaletteFactory { private static final String POINTER_SELECT_LABEL = Messages.getString( "BasePaletteFactory.categoryName.PointerSelect" );//$NON-NLS-1$ private static final String RECTANGEL_SELECT_LABEL = Messages.getString( "BasePaletteFactory.categoryName.RectangleSelect" );//$NON-NLS-1$ private static final String TOOL_TIP_POINTER_SELECT = Messages.getString( "BasePaletteFactory.toolTip.PointerSelect" );//$NON-NLS-1$ private static final String TOOL_TIP_RECTANGLE_SELECT = Messages.getString( "BasePaletteFactory.toolTip.RectangleSelect" );//$NON-NLS-1$ private static final String PALETTE_GROUP_TEXT = Messages.getString( "BasePaletteFactory.Group.Items" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_PAGE_X_OF_Y = Messages.getString( "BasePaletteFactory.AutoTextLabel.PageXofY" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_PAGE_COUNT = Messages.getString( "BasePaletteFactory.AutoTextLabel.PageCount" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_LAST_PRINTED = Messages.getString( "BasePaletteFactory.AutoTextLabel.LastPrinted" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_FILENAME = Messages.getString( "BasePaletteFactory.AutoTextLabel.Filename" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_CREATE_BY = Messages.getString( "BasePaletteFactory.AutoTextLabel.CreatedBy" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_CREATE_ON = Messages.getString( "BasePaletteFactory.AutoTextLabel.CreatedOn" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_PAGE = Messages.getString( "BasePaletteFactory.AutoTextLabel.Page" ); //$NON-NLS-1$ private static final String AUTOTEXT_TEXT_CONFIDENTIAL = Messages.getString( "BasePaletteFactory.AutoText.Confidential" ); //$NON-NLS-1$ protected static PaletteRoot createPalette( ) { PaletteRoot root = new PaletteRoot( ); root.add( createControlGroup( root ) ); return root; } /** * Creates default tools category, which include selection and 
marquee tools * * @param root * the root * @return PaletteContainer contains default tools */ protected static PaletteContainer createControlGroup( PaletteRoot root ) { PaletteGroup controlGroup = new PaletteGroup( PALETTE_GROUP_TEXT ); List entries = new ArrayList( ); ToolEntry tool = new PanningSelectionToolEntry( POINTER_SELECT_LABEL, TOOL_TIP_POINTER_SELECT ); entries.add( tool ); root.setDefaultEntry( tool ); tool = new MarqueeToolEntry( RECTANGEL_SELECT_LABEL, TOOL_TIP_RECTANGLE_SELECT ); entries.add( tool ); controlGroup.addAll( entries ); return controlGroup; } /** * Provides element building support for table element. */ public static class TableToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_TABLE.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { TableOptionDialog dlg = new TableOptionDialog( UIUtil.getDefaultShell( ), true ); if ( dlg.open( ) == Window.OK && dlg.getResult( ) instanceof Object[] ) { Object[] data = (Object[]) dlg.getResult( ); // String name = ReportPlugin.getDefault().getCustomName( // ReportDesignConstants.TABLE_ITEM); // // TableHandle table = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newTableItem( name, data[1], 1, data[0], 1 ); TableHandle table = DesignElementFactory.getInstance( ) .newTableItem( null, ( (Integer) data[1] ).intValue( ), 1, ( (Integer) data[0] ).intValue( ), 1 ); InsertInLayoutUtil.setInitWidth( table ); if ( data[2] != null ) { try { DataSetHandle dataSet = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ) .findDataSet( data[2].toString( ) ); ( (ReportItemHandle) table ).setDataSet( dataSet ); DataSetColumnBindingsFormHandleProvider provider = new DataSetColumnBindingsFormHandleProvider( ); 
provider.setBindingObject( table ); provider.generateAllBindingColumns( ); } catch ( Exception e ) { ExceptionHandler.handle( e ); } } setModel( table ); return super.preHandleMouseUp( ); } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for Data element. */ public static class DataToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_DATA.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { // DataItemHandle dataHandle = SessionHandleAdapter.getInstance( // ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newDataItem( null ); DataItemHandle dataHandle = DesignElementFactory.getInstance( ) .newDataItem( null ); setModel( dataHandle ); // disable this dialog // dialog will pop-up after image is create // see ReportCreationTool.selectAddedObject() // BindingColumnDialog dialog = new BindingColumnDialog( true ); // dialog.setInput( dataHandle ); // if ( dialog.open( ) == Window.OK ) // { return super.preHandleMouseUp( ); // } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { // TODO Auto-generated method stub return false; } } /** * Provides element building support for Image element. 
*/ public static class ImageToolExtends extends AbstractToolHandleExtends { private List dataSetList = new ArrayList( ); /* * get target design element handle */ private DesignElementHandle getDesignElementHandle( ) { Object model = getTargetEditPart( ).getModel( ); DesignElementHandle desginElementHandle = null; if ( model instanceof DesignElementHandle ) { desginElementHandle = (DesignElementHandle) ( model ); } else if ( model instanceof ListBandProxy ) { desginElementHandle = ( (ListBandProxy) ( model ) ).getSlotHandle( ) .getElementHandle( ); } return desginElementHandle; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { // CreateRequest request = getRequest( ); // // DesignElementHandle desginElementHandle = getDesignElementHandle( // ); // dataSetList = DEUtil.getDataSetList( desginElementHandle ); // // if ( // IReportElementConstants.REPORT_ELEMENT_IMAGE.equalsIgnoreCase( // (String) request.getNewObjectType( ) ) ) // { // // Open the builder for new image // ImageBuilder dialog = new ImageBuilder( UIUtil.getDefaultShell( // ), // ImageBuilder.DLG_TITLE_NEW, // dataSetList ); // if ( Window.OK == dialog.open( ) ) // { // setModel( dialog.getResult( ) ); // // // If the dialog popup, mouse up event will not be called // // automatically, call it explicit // return super.preHandleMouseUp( ); // } // } // return false; ImageHandle dataHandle = DesignElementFactory.getInstance( ) .newImage( null ); setModel( dataHandle ); return super.preHandleMouseUp( ); } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { // TODO Auto-generated method stub return false; } } public static class AutoTextToolExtends extends AbstractToolHandleExtends { public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); AutoTextHandle autoTextItemHandle = DesignElementFactory.getInstance( ) .newAutoText( null ); try { if ( IReportElementConstants.AUTOTEXT_PAGE.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { autoTextItemHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_NUMBER ); } else if ( IReportElementConstants.AUTOTEXT_TOTAL_PAGE_COUNT.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { autoTextItemHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_TOTAL_PAGE ); } } catch ( SemanticException e ) { // TODO Auto-generated catch block e.printStackTrace( ); } setModel( autoTextItemHandle ); return super.preHandleMouseUp( ); } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } public static class VariableToolExtends extends AbstractToolHandleExtends { public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.AUTOTEXT_VARIABLE.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { ModuleHandle reportHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ); if(reportHandle instanceof ReportDesignHandle){ SelectVariableDialog dialog = new SelectVariableDialog( (ReportDesignHandle) SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ) ); if ( dialog.open( ) == Dialog.OK ) { AutoTextHandle autoTextItemHandle = DesignElementFactory.getInstance( ) .newAutoText( null ); try { autoTextItemHandle.setPageVariable( (String) dialog.getResult( ) ); autoTextItemHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_VARIABLE ); setModel( autoTextItemHandle ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } } return super.preHandleMouseUp( ); } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for text element. */ public static class TextToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ private static final String DEFAULT_AUTHOR = Messages.getString( "TextExtendsTools.Message.DefaultAuthor" ); //$NON-NLS-1$ public boolean preHandleMouseUp( ) { String type = (String) getRequest( ).getNewObjectType( ); String text = null; ModuleHandle reportHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ); // TextItemHandle textItemHandle = SessionHandleAdapter.getInstance( // ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newTextItem( null ); TextItemHandle textItemHandle = DesignElementFactory.getInstance( ) .newTextItem( null ); try { // if ( IReportElementConstants.AUTOTEXT_PAGE.equalsIgnoreCase( // type ) ) // { // text = AUTOTEXT_LABEL_PAGE // + "<value-of>pageNumber</value-of>"; //$NON-NLS-1$ // textItemHandle.setContentType( // DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); // // }else if ( IReportElementConstants.AUTOTEXT_DATE.equalsIgnoreCase( type ) ) { text = "<value-of>new Date()</value-of>"; //$NON-NLS-1$ textItemHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); } else if ( IReportElementConstants.AUTOTEXT_CREATEDON.equalsIgnoreCase( type ) ) { text = AUTOTEXT_LABEL_CREATE_ON + "<value-of>new Date()</value-of>"; //$NON-NLS-1$ textItemHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); } else if ( IReportElementConstants.AUTOTEXT_CREATEDBY.equalsIgnoreCase( type ) ) { text = AUTOTEXT_LABEL_CREATE_BY; String author = reportHandle.getAuthor( ); if ( author != null ) { text += author; } else { String pluginVersion = (String) ReportPlugin.getDefault( ) .getBundle( ) .getHeaders( ) .get( org.osgi.framework.Constants.BUNDLE_VERSION ); text += DEFAULT_AUTHOR + " " //$NON-NLS-1$ + pluginVersion; } } else if ( IReportElementConstants.AUTOTEXT_FILENAME.equalsIgnoreCase( type ) ) { text = AUTOTEXT_LABEL_FILENAME + reportHandle.getFileName( ); } else if ( IReportElementConstants.AUTOTEXT_LASTPRINTED.equalsIgnoreCase( type ) ) { text = 
AUTOTEXT_LABEL_LAST_PRINTED + "<value-of>new Date()</value-of>"; //$NON-NLS-1$ textItemHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); } // else if ( // IReportElementConstants.AUTOTEXT_TOTAL_PAGE_COUNT.equalsIgnoreCase( // type ) ) // { // text = AUTOTEXT_LABEL_PAGE_COUNT // + "<value-of>pageNumber</value-of>" // + " of" // + "<value-of>pageNumber</value-of>"; // } // else if ( // IReportElementConstants.AUTOTEXT_PAGEXOFY.equalsIgnoreCase( // type ) ) // { // text = AUTOTEXT_LABEL_PAGE_X_OF_Y // + "Page " // + "<value-of>pageNumber</value-of>" // + " of" // + "<value-of>pageNumber</value-of>"; // } else if ( !IReportElementConstants.REPORT_ELEMENT_TEXT.equalsIgnoreCase( type ) ) { return false; } if ( text != null ) { textItemHandle.setContent( text ); } } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } setModel( textItemHandle ); return super.preHandleMouseUp( ); } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for label element. */ public static class LabelToolExtends extends AbstractToolHandleExtends { public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_LABEL.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { // LabelHandle labelItemHandle = // SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newLabel( null ); LabelHandle labelItemHandle = DesignElementFactory.getInstance( ) .newLabel( null ); setModel( labelItemHandle ); return super.preHandleMouseUp( ); } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for TextData element. */ public static class TextDataToolExtends extends AbstractToolHandleExtends { public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_TEXTDATA.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { // LabelHandle labelItemHandle = // SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newLabel( null ); TextDataHandle textItemHandle = DesignElementFactory.getInstance( ) .newTextData( null ); try { textItemHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); return false; } setModel( textItemHandle ); return super.preHandleMouseUp( ); } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for List element. */ public static class ListToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_LIST.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { // ListHandle list = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newList( null ); ListHandle list = DesignElementFactory.getInstance( ) .newList( null ); setModel( list ); return super.preHandleMouseUp( ); } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for grid element. */ public static class GridToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { ModuleHandle reportDesignHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ); // ElementFactory factory = reportDesignHandle.getElementFactory( ); DesignElementFactory factory = DesignElementFactory.getInstance( reportDesignHandle ); String type = (String) getRequest( ).getNewObjectType( ); GridHandle grid = null; if ( IReportElementConstants.AUTOTEXT_PAGEXOFY.equals( type ) ) { grid = factory.newGridItem( null, 3, 1 ); try { List cellList = ( (RowHandle) grid.getRows( ).get( 0 ) ).getCells( ) .getContents( ); AutoTextHandle autoTextHandle = factory.newAutoText( null ); autoTextHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_NUMBER ); ( (CellHandle) cellList.get( 0 ) ).getContent( ) .add( autoTextHandle ); TextItemHandle textHandle = factory.newTextItem( null ); textHandle.setContent( "/" ); //$NON-NLS-1$ textHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_PLAIN ); ( (CellHandle) cellList.get( 1 ) ).getContent( ) .add( textHandle ); autoTextHandle = factory.newAutoText( null ); autoTextHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_TOTAL_PAGE ); ( (CellHandle) cellList.get( 2 ) ).getContent( ) .add( autoTextHandle ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } else if ( IReportElementConstants.AUTOTEXT_AUTHOR_PAGE_DATE.equals( type ) ) { grid = factory.newGridItem( null, 3, 1 ); try { List cellList = ( (RowHandle) grid.getRows( ).get( 0 ) ).getCells( ) .getContents( ); TextItemHandle textHandle = factory.newTextItem( null ); String text = AUTOTEXT_LABEL_CREATE_BY; if ( 
reportDesignHandle.getAuthor( ) != null ) { text += reportDesignHandle.getAuthor( ); } textHandle.setContent( text ); ( (CellHandle) cellList.get( 0 ) ).getContent( ) .add( textHandle ); AutoTextHandle autoTextHandle = factory.newAutoText( null ); autoTextHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_NUMBER ); ( (CellHandle) cellList.get( 1 ) ).getContent( ) .add( autoTextHandle ); textHandle = factory.newTextItem( null ); textHandle.setContent( "<value-of>new Date()</value-of>" ); //$NON-NLS-1$ textHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); ( (CellHandle) cellList.get( 2 ) ).getContent( ) .add( textHandle ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } else if ( IReportElementConstants.AUTOTEXT_CONFIDENTIAL_PAGE.equals( type ) ) { grid = factory.newGridItem( null, 2, 1 ); try { List cellList = ( (RowHandle) grid.getRows( ).get( 0 ) ).getCells( ) .getContents( ); TextItemHandle textHandle = factory.newTextItem( null ); textHandle.setContent( AUTOTEXT_TEXT_CONFIDENTIAL ); textHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); ( (CellHandle) cellList.get( 0 ) ).getContent( ) .add( textHandle ); AutoTextHandle autoTextHandle = factory.newAutoText( null ); autoTextHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_NUMBER ); ( (CellHandle) cellList.get( 1 ) ).getContent( ) .add( autoTextHandle ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } else if ( IReportElementConstants.REPORT_ELEMENT_GRID.equals( type ) ) { TableOptionDialog dlg = new TableOptionDialog( UIUtil.getDefaultShell( ), false ); if ( dlg.open( ) == Window.OK && dlg.getResult( ) instanceof Object[] ) { Object[] data = (Object[]) dlg.getResult( ); grid = factory.newGridItem( null, ( (Integer) data[1] ).intValue( ), ( (Integer) data[0] ).intValue( ) ); } else { return false; } } else { return false; } InsertInLayoutUtil.setInitWidth( grid ); setModel( grid ); return super.preHandleMouseUp( 
); } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for data set. */ public static class DataSetToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof DataSetHandle ) { try { // add extended dataset element. Object newObj = getRequest( ).getNewObject( ); if ( newObj instanceof Object[] && ( (Object[]) newObj ).length > 0 ) { newObj = ( (Object[]) newObj )[0]; } DesignElementHandle elementHandle = (DesignElementHandle) newObj; ModuleHandle moduleHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ); // element comes from library and not to itself. if ( elementHandle.getRoot( ) instanceof LibraryHandle && elementHandle.getRoot( ) != moduleHandle ) { LibraryHandle library = (LibraryHandle) elementHandle.getRoot( ); try { if ( UIUtil.includeLibrary( moduleHandle, library ) ) { elementHandle = moduleHandle.getElementFactory( ) .newElementFrom( elementHandle, elementHandle.getName( ) ); moduleHandle.addElement( elementHandle, moduleHandle.getDataSets( ).getSlotID( ) ); } } catch ( Exception e ) { ExceptionHandler.handle( e ); } } Object newHandle = InsertInLayoutUtil.performInsert( elementHandle, getTargetEditPart( ) ); if ( newHandle == null ) return false; setModel( newHandle ); return super.preHandleMouseUp( ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * for cross tab Provides element building support for data set. */ public static class DimensionHandleToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof DimensionHandle ) { // try // { // add extended dataset element. Object newObj = getRequest( ).getNewObject( ); if ( newObj instanceof Object[] && ( (Object[]) newObj ).length > 0 ) { newObj = ( (Object[]) newObj )[0]; } DesignElementHandle elementHandle = (DesignElementHandle) newObj; // ModuleHandle moduleHandle = SessionHandleAdapter.getInstance( // ) // .getReportDesignHandle( ); // // element comes from library and not to itself. // // Object newHandle = InsertInLayoutUtil.performInsert( // elementHandle, // getTargetEditPart( ) ); if ( elementHandle == null ) return false; setModel( elementHandle ); return super.preHandleMouseUp( ); // } // catch ( SemanticException e ) // { // ExceptionHandler.handle( e ); // } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } public static class MeasureHandleToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof MeasureHandle ) { // try // { // add extended dataset element. 
Object newObj = getRequest( ).getNewObject( ); if ( newObj instanceof Object[] && ( (Object[]) newObj ).length > 0 ) { newObj = ( (Object[]) newObj )[0]; } DesignElementHandle elementHandle = (DesignElementHandle) newObj; // ModuleHandle moduleHandle = SessionHandleAdapter.getInstance( // ) // .getReportDesignHandle( ); // // element comes from library and not to itself. // // Object newHandle = InsertInLayoutUtil.performInsert( // elementHandle, // getTargetEditPart( ) ); if ( elementHandle == null ) return false; setModel( elementHandle ); return super.preHandleMouseUp( ); // } // catch ( SemanticException e ) // { // ExceptionHandler.handle( e ); // } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for data set column. */ public static class DataSetColumnToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof DataSetItemModel || getRequest( ).getNewObjectType( ) instanceof ResultSetColumnHandle ) { try { Object newHandle = InsertInLayoutUtil.performInsert( getRequest( ).getNewObject( ), getTargetEditPart( ) ); if ( newHandle == null ) return false; setModel( newHandle ); return super.preHandleMouseUp( ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for parameter. 
*/ public static class ParameterToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof ScalarParameterHandle ) { try { Object newObj = getRequest( ).getNewObject( ); if ( newObj instanceof Object[] ) { Object[] newObjs = (Object[]) newObj; newObj = UIUtil.getInsertPamaterElements(newObjs); } Object newHandle = InsertInLayoutUtil.performInsert( newObj, getTargetEditPart( ) ); if ( newHandle == null ) return false; setModel( newHandle ); return super.preHandleMouseUp( ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } public static AbstractToolHandleExtends getAbstractToolHandleExtendsFromPaletteName( Object str ) { if ( !( str instanceof String ) ) { throw new Error( "Don't support the other type expect String" ); //$NON-NLS-1$ } String template = (String) str; AbstractToolHandleExtends preHandle = null; if ( IReportElementConstants.REPORT_ELEMENT_IMAGE.equalsIgnoreCase( template ) ) { preHandle = new ImageToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_TABLE.equalsIgnoreCase( template ) ) { preHandle = new TableToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_TEXT.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_DATE.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_CREATEDON.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_CREATEDBY.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_FILENAME.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_LASTPRINTED.equalsIgnoreCase( template ) ) { preHandle = new 
TextToolExtends( ); } else if ( IReportElementConstants.AUTOTEXT_PAGE.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_TOTAL_PAGE_COUNT.equalsIgnoreCase( template ) ) { preHandle = new AutoTextToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_TEXTDATA.equalsIgnoreCase( template ) ) { preHandle = new TextDataToolExtends( ); } else if ( IReportElementConstants.AUTOTEXT_AUTHOR_PAGE_DATE.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_CONFIDENTIAL_PAGE.equalsIgnoreCase( template ) || IReportElementConstants.REPORT_ELEMENT_GRID.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_PAGEXOFY.equalsIgnoreCase( template ) ) { preHandle = new GridToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_LABEL.equalsIgnoreCase( template ) ) { preHandle = new LabelToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_DATA.equalsIgnoreCase( template ) ) { preHandle = new DataToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_LIST.equalsIgnoreCase( template ) ) { preHandle = new ListToolExtends( ); } else if ( ( template ).startsWith( IReportElementConstants.REPORT_ELEMENT_EXTENDED ) ) { String extensionName = template.substring( IReportElementConstants.REPORT_ELEMENT_EXTENDED.length( ) ); preHandle = new ExtendedElementToolExtends( extensionName ); } else if ( IReportElementConstants.AUTOTEXT_VARIABLE.equalsIgnoreCase( template ) ) { preHandle = new VariableToolExtends( ); } if ( preHandle == null ) { throw new Error( "Don't find the AbstractToolHandleExtends" ); //$NON-NLS-1$ } return preHandle; } }
UI/org.eclipse.birt.report.designer.ui/src/org/eclipse/birt/report/designer/internal/ui/palette/BasePaletteFactory.java
/************************************************************************************* * Copyright (c) 2004 Actuate Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - Initial implementation. ************************************************************************************/ package org.eclipse.birt.report.designer.internal.ui.palette; import java.util.ArrayList; import java.util.List; import org.eclipse.birt.report.designer.core.IReportElementConstants; import org.eclipse.birt.report.designer.core.model.SessionHandleAdapter; import org.eclipse.birt.report.designer.core.model.schematic.ListBandProxy; import org.eclipse.birt.report.designer.core.model.views.data.DataSetItemModel; import org.eclipse.birt.report.designer.internal.ui.dialogs.TableOptionDialog; import org.eclipse.birt.report.designer.internal.ui.dnd.InsertInLayoutUtil; import org.eclipse.birt.report.designer.internal.ui.editors.schematic.extensions.ExtendedElementToolExtends; import org.eclipse.birt.report.designer.internal.ui.editors.schematic.tools.AbstractToolHandleExtends; import org.eclipse.birt.report.designer.internal.ui.util.ExceptionHandler; import org.eclipse.birt.report.designer.internal.ui.util.UIUtil; import org.eclipse.birt.report.designer.internal.ui.views.dialogs.provider.DataSetColumnBindingsFormHandleProvider; import org.eclipse.birt.report.designer.nls.Messages; import org.eclipse.birt.report.designer.ui.ReportPlugin; import org.eclipse.birt.report.designer.ui.dialogs.SelectVariableDialog; import org.eclipse.birt.report.designer.ui.newelement.DesignElementFactory; import org.eclipse.birt.report.model.api.AutoTextHandle; import org.eclipse.birt.report.model.api.CellHandle; import org.eclipse.birt.report.model.api.DataItemHandle; 
import org.eclipse.birt.report.model.api.DataSetHandle; import org.eclipse.birt.report.model.api.DesignElementHandle; import org.eclipse.birt.report.model.api.GridHandle; import org.eclipse.birt.report.model.api.ImageHandle; import org.eclipse.birt.report.model.api.LabelHandle; import org.eclipse.birt.report.model.api.LibraryHandle; import org.eclipse.birt.report.model.api.ListHandle; import org.eclipse.birt.report.model.api.ModuleHandle; import org.eclipse.birt.report.model.api.ReportDesignHandle; import org.eclipse.birt.report.model.api.ReportItemHandle; import org.eclipse.birt.report.model.api.ResultSetColumnHandle; import org.eclipse.birt.report.model.api.RowHandle; import org.eclipse.birt.report.model.api.ScalarParameterHandle; import org.eclipse.birt.report.model.api.TableHandle; import org.eclipse.birt.report.model.api.TextDataHandle; import org.eclipse.birt.report.model.api.TextItemHandle; import org.eclipse.birt.report.model.api.activity.SemanticException; import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants; import org.eclipse.birt.report.model.api.olap.DimensionHandle; import org.eclipse.birt.report.model.api.olap.MeasureHandle; import org.eclipse.gef.palette.MarqueeToolEntry; import org.eclipse.gef.palette.PaletteContainer; import org.eclipse.gef.palette.PaletteGroup; import org.eclipse.gef.palette.PaletteRoot; import org.eclipse.gef.palette.PanningSelectionToolEntry; import org.eclipse.gef.palette.ToolEntry; import org.eclipse.gef.requests.CreateRequest; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.jface.window.Window; /** * is the base class of Palette factory. Creates default tools here. 
*/ public class BasePaletteFactory { private static final String POINTER_SELECT_LABEL = Messages.getString( "BasePaletteFactory.categoryName.PointerSelect" );//$NON-NLS-1$ private static final String RECTANGEL_SELECT_LABEL = Messages.getString( "BasePaletteFactory.categoryName.RectangleSelect" );//$NON-NLS-1$ private static final String TOOL_TIP_POINTER_SELECT = Messages.getString( "BasePaletteFactory.toolTip.PointerSelect" );//$NON-NLS-1$ private static final String TOOL_TIP_RECTANGLE_SELECT = Messages.getString( "BasePaletteFactory.toolTip.RectangleSelect" );//$NON-NLS-1$ private static final String PALETTE_GROUP_TEXT = Messages.getString( "BasePaletteFactory.Group.Items" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_PAGE_X_OF_Y = Messages.getString( "BasePaletteFactory.AutoTextLabel.PageXofY" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_PAGE_COUNT = Messages.getString( "BasePaletteFactory.AutoTextLabel.PageCount" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_LAST_PRINTED = Messages.getString( "BasePaletteFactory.AutoTextLabel.LastPrinted" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_FILENAME = Messages.getString( "BasePaletteFactory.AutoTextLabel.Filename" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_CREATE_BY = Messages.getString( "BasePaletteFactory.AutoTextLabel.CreatedBy" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_CREATE_ON = Messages.getString( "BasePaletteFactory.AutoTextLabel.CreatedOn" ); //$NON-NLS-1$ private static final String AUTOTEXT_LABEL_PAGE = Messages.getString( "BasePaletteFactory.AutoTextLabel.Page" ); //$NON-NLS-1$ private static final String AUTOTEXT_TEXT_CONFIDENTIAL = Messages.getString( "BasePaletteFactory.AutoText.Confidential" ); //$NON-NLS-1$ protected static PaletteRoot createPalette( ) { PaletteRoot root = new PaletteRoot( ); root.add( createControlGroup( root ) ); return root; } /** * Creates default tools category, which include selection and 
marquee tools * * @param root * the root * @return PaletteContainer contains default tools */ protected static PaletteContainer createControlGroup( PaletteRoot root ) { PaletteGroup controlGroup = new PaletteGroup( PALETTE_GROUP_TEXT ); List entries = new ArrayList( ); ToolEntry tool = new PanningSelectionToolEntry( POINTER_SELECT_LABEL, TOOL_TIP_POINTER_SELECT ); entries.add( tool ); root.setDefaultEntry( tool ); tool = new MarqueeToolEntry( RECTANGEL_SELECT_LABEL, TOOL_TIP_RECTANGLE_SELECT ); entries.add( tool ); controlGroup.addAll( entries ); return controlGroup; } /** * Provides element building support for table element. */ public static class TableToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_TABLE.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { TableOptionDialog dlg = new TableOptionDialog( UIUtil.getDefaultShell( ), true ); if ( dlg.open( ) == Window.OK && dlg.getResult( ) instanceof Object[] ) { Object[] data = (Object[]) dlg.getResult( ); // String name = ReportPlugin.getDefault().getCustomName( // ReportDesignConstants.TABLE_ITEM); // // TableHandle table = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newTableItem( name, data[1], 1, data[0], 1 ); TableHandle table = DesignElementFactory.getInstance( ) .newTableItem( null, ( (Integer) data[1] ).intValue( ), 1, ( (Integer) data[0] ).intValue( ), 1 ); InsertInLayoutUtil.setInitWidth( table ); if ( data[2] != null ) { try { DataSetHandle dataSet = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ) .findDataSet( data[2].toString( ) ); ( (ReportItemHandle) table ).setDataSet( dataSet ); DataSetColumnBindingsFormHandleProvider provider = new DataSetColumnBindingsFormHandleProvider( ); 
provider.setBindingObject( table ); provider.generateAllBindingColumns( ); } catch ( Exception e ) { ExceptionHandler.handle( e ); } } setModel( table ); return super.preHandleMouseUp( ); } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for Data element. */ public static class DataToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_DATA.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { // DataItemHandle dataHandle = SessionHandleAdapter.getInstance( // ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newDataItem( null ); DataItemHandle dataHandle = DesignElementFactory.getInstance( ) .newDataItem( null ); setModel( dataHandle ); // disable this dialog // dialog will pop-up after image is create // see ReportCreationTool.selectAddedObject() // BindingColumnDialog dialog = new BindingColumnDialog( true ); // dialog.setInput( dataHandle ); // if ( dialog.open( ) == Window.OK ) // { return super.preHandleMouseUp( ); // } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { // TODO Auto-generated method stub return false; } } /** * Provides element building support for Image element. 
*/ public static class ImageToolExtends extends AbstractToolHandleExtends { private List dataSetList = new ArrayList( ); /* * get target design element handle */ private DesignElementHandle getDesignElementHandle( ) { Object model = getTargetEditPart( ).getModel( ); DesignElementHandle desginElementHandle = null; if ( model instanceof DesignElementHandle ) { desginElementHandle = (DesignElementHandle) ( model ); } else if ( model instanceof ListBandProxy ) { desginElementHandle = ( (ListBandProxy) ( model ) ).getSlotHandle( ) .getElementHandle( ); } return desginElementHandle; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { // CreateRequest request = getRequest( ); // // DesignElementHandle desginElementHandle = getDesignElementHandle( // ); // dataSetList = DEUtil.getDataSetList( desginElementHandle ); // // if ( // IReportElementConstants.REPORT_ELEMENT_IMAGE.equalsIgnoreCase( // (String) request.getNewObjectType( ) ) ) // { // // Open the builder for new image // ImageBuilder dialog = new ImageBuilder( UIUtil.getDefaultShell( // ), // ImageBuilder.DLG_TITLE_NEW, // dataSetList ); // if ( Window.OK == dialog.open( ) ) // { // setModel( dialog.getResult( ) ); // // // If the dialog popup, mouse up event will not be called // // automatically, call it explicit // return super.preHandleMouseUp( ); // } // } // return false; ImageHandle dataHandle = DesignElementFactory.getInstance( ) .newImage( null ); setModel( dataHandle ); return super.preHandleMouseUp( ); } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { // TODO Auto-generated method stub return false; } } public static class AutoTextToolExtends extends AbstractToolHandleExtends { public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); AutoTextHandle autoTextItemHandle = DesignElementFactory.getInstance( ) .newAutoText( null ); try { if ( IReportElementConstants.AUTOTEXT_PAGE.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { autoTextItemHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_NUMBER ); } else if ( IReportElementConstants.AUTOTEXT_TOTAL_PAGE_COUNT.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { autoTextItemHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_TOTAL_PAGE ); } } catch ( SemanticException e ) { // TODO Auto-generated catch block e.printStackTrace( ); } setModel( autoTextItemHandle ); return super.preHandleMouseUp( ); } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } public static class VariableToolExtends extends AbstractToolHandleExtends { public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.AUTOTEXT_VARIABLE.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { ModuleHandle reportHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ); if(reportHandle instanceof ReportDesignHandle){ SelectVariableDialog dialog = new SelectVariableDialog( (ReportDesignHandle) SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ) ); if ( dialog.open( ) == Dialog.OK ) { AutoTextHandle autoTextItemHandle = DesignElementFactory.getInstance( ) .newAutoText( null ); try { autoTextItemHandle.setPageVariable( (String) dialog.getResult( ) ); autoTextItemHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_VARIABLE ); setModel( autoTextItemHandle ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } } return super.preHandleMouseUp( ); } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for text element. */ public static class TextToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ private static final String DEFAULT_AUTHOR = Messages.getString( "TextExtendsTools.Message.DefaultAuthor" ); //$NON-NLS-1$ public boolean preHandleMouseUp( ) { String type = (String) getRequest( ).getNewObjectType( ); String text = null; ModuleHandle reportHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ); // TextItemHandle textItemHandle = SessionHandleAdapter.getInstance( // ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newTextItem( null ); TextItemHandle textItemHandle = DesignElementFactory.getInstance( ) .newTextItem( null ); try { // if ( IReportElementConstants.AUTOTEXT_PAGE.equalsIgnoreCase( // type ) ) // { // text = AUTOTEXT_LABEL_PAGE // + "<value-of>pageNumber</value-of>"; //$NON-NLS-1$ // textItemHandle.setContentType( // DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); // // }else if ( IReportElementConstants.AUTOTEXT_DATE.equalsIgnoreCase( type ) ) { text = "<value-of>new Date()</value-of>"; //$NON-NLS-1$ textItemHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); } else if ( IReportElementConstants.AUTOTEXT_CREATEDON.equalsIgnoreCase( type ) ) { text = AUTOTEXT_LABEL_CREATE_ON + "<value-of>new Date()</value-of>"; //$NON-NLS-1$ textItemHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); } else if ( IReportElementConstants.AUTOTEXT_CREATEDBY.equalsIgnoreCase( type ) ) { text = AUTOTEXT_LABEL_CREATE_BY; String author = reportHandle.getAuthor( ); if ( author != null ) { text += author; } else { String pluginVersion = (String) ReportPlugin.getDefault( ) .getBundle( ) .getHeaders( ) .get( org.osgi.framework.Constants.BUNDLE_VERSION ); text += DEFAULT_AUTHOR + " " //$NON-NLS-1$ + pluginVersion; } } else if ( IReportElementConstants.AUTOTEXT_FILENAME.equalsIgnoreCase( type ) ) { text = AUTOTEXT_LABEL_FILENAME + reportHandle.getFileName( ); } else if ( IReportElementConstants.AUTOTEXT_LASTPRINTED.equalsIgnoreCase( type ) ) { text = 
AUTOTEXT_LABEL_LAST_PRINTED + "<value-of>new Date()</value-of>"; //$NON-NLS-1$ textItemHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); } // else if ( // IReportElementConstants.AUTOTEXT_TOTAL_PAGE_COUNT.equalsIgnoreCase( // type ) ) // { // text = AUTOTEXT_LABEL_PAGE_COUNT // + "<value-of>pageNumber</value-of>" // + " of" // + "<value-of>pageNumber</value-of>"; // } // else if ( // IReportElementConstants.AUTOTEXT_PAGEXOFY.equalsIgnoreCase( // type ) ) // { // text = AUTOTEXT_LABEL_PAGE_X_OF_Y // + "Page " // + "<value-of>pageNumber</value-of>" // + " of" // + "<value-of>pageNumber</value-of>"; // } else if ( !IReportElementConstants.REPORT_ELEMENT_TEXT.equalsIgnoreCase( type ) ) { return false; } if ( text != null ) { textItemHandle.setContent( text ); } } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } setModel( textItemHandle ); return super.preHandleMouseUp( ); } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for label element. */ public static class LabelToolExtends extends AbstractToolHandleExtends { public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_LABEL.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { // LabelHandle labelItemHandle = // SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newLabel( null ); LabelHandle labelItemHandle = DesignElementFactory.getInstance( ) .newLabel( null ); setModel( labelItemHandle ); return super.preHandleMouseUp( ); } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for TextData element. */ public static class TextDataToolExtends extends AbstractToolHandleExtends { public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_TEXTDATA.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { // LabelHandle labelItemHandle = // SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newLabel( null ); TextDataHandle textItemHandle = DesignElementFactory.getInstance( ) .newTextData( null ); try { textItemHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); return false; } setModel( textItemHandle ); return super.preHandleMouseUp( ); } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for List element. */ public static class ListToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { CreateRequest request = getRequest( ); if ( IReportElementConstants.REPORT_ELEMENT_LIST.equalsIgnoreCase( (String) request.getNewObjectType( ) ) ) { // ListHandle list = SessionHandleAdapter.getInstance( ) // .getReportDesignHandle( ) // .getElementFactory( ) // .newList( null ); ListHandle list = DesignElementFactory.getInstance( ) .newList( null ); setModel( list ); return super.preHandleMouseUp( ); } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for grid element. */ public static class GridToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { ModuleHandle reportDesignHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ); // ElementFactory factory = reportDesignHandle.getElementFactory( ); DesignElementFactory factory = DesignElementFactory.getInstance( reportDesignHandle ); String type = (String) getRequest( ).getNewObjectType( ); GridHandle grid = null; if ( IReportElementConstants.AUTOTEXT_PAGEXOFY.equals( type ) ) { grid = factory.newGridItem( null, 3, 1 ); try { List cellList = ( (RowHandle) grid.getRows( ).get( 0 ) ).getCells( ) .getContents( ); AutoTextHandle autoTextHandle = factory.newAutoText( null ); autoTextHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_NUMBER ); ( (CellHandle) cellList.get( 0 ) ).getContent( ) .add( autoTextHandle ); TextItemHandle textHandle = factory.newTextItem( null ); textHandle.setContent( "/" ); //$NON-NLS-1$ textHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_PLAIN ); ( (CellHandle) cellList.get( 1 ) ).getContent( ) .add( textHandle ); autoTextHandle = factory.newAutoText( null ); autoTextHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_TOTAL_PAGE ); ( (CellHandle) cellList.get( 2 ) ).getContent( ) .add( autoTextHandle ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } else if ( IReportElementConstants.AUTOTEXT_AUTHOR_PAGE_DATE.equals( type ) ) { grid = factory.newGridItem( null, 3, 1 ); try { List cellList = ( (RowHandle) grid.getRows( ).get( 0 ) ).getCells( ) .getContents( ); TextItemHandle textHandle = factory.newTextItem( null ); String text = AUTOTEXT_LABEL_CREATE_BY; if ( 
reportDesignHandle.getAuthor( ) != null ) { text += reportDesignHandle.getAuthor( ); } textHandle.setContent( text ); ( (CellHandle) cellList.get( 0 ) ).getContent( ) .add( textHandle ); AutoTextHandle autoTextHandle = factory.newAutoText( null ); autoTextHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_NUMBER ); ( (CellHandle) cellList.get( 1 ) ).getContent( ) .add( autoTextHandle ); textHandle = factory.newTextItem( null ); textHandle.setContent( "<value-of>new Date()</value-of>" ); //$NON-NLS-1$ textHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); ( (CellHandle) cellList.get( 2 ) ).getContent( ) .add( textHandle ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } else if ( IReportElementConstants.AUTOTEXT_CONFIDENTIAL_PAGE.equals( type ) ) { grid = factory.newGridItem( null, 2, 1 ); try { List cellList = ( (RowHandle) grid.getRows( ).get( 0 ) ).getCells( ) .getContents( ); TextItemHandle textHandle = factory.newTextItem( null ); textHandle.setContent( AUTOTEXT_TEXT_CONFIDENTIAL ); textHandle.setContentType( DesignChoiceConstants.TEXT_CONTENT_TYPE_HTML ); ( (CellHandle) cellList.get( 0 ) ).getContent( ) .add( textHandle ); AutoTextHandle autoTextHandle = factory.newAutoText( null ); autoTextHandle.setAutoTextType( DesignChoiceConstants.AUTO_TEXT_PAGE_NUMBER ); ( (CellHandle) cellList.get( 1 ) ).getContent( ) .add( autoTextHandle ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } else if ( IReportElementConstants.REPORT_ELEMENT_GRID.equals( type ) ) { TableOptionDialog dlg = new TableOptionDialog( UIUtil.getDefaultShell( ), false ); if ( dlg.open( ) == Window.OK && dlg.getResult( ) instanceof Object[] ) { Object[] data = (Object[]) dlg.getResult( ); grid = factory.newGridItem( null, ( (Integer) data[1] ).intValue( ), ( (Integer) data[0] ).intValue( ) ); } else { return false; } } else { return false; } InsertInLayoutUtil.setInitWidth( grid ); setModel( grid ); return super.preHandleMouseUp( 
); } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for data set. */ public static class DataSetToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof DataSetHandle ) { try { // add extended dataset element. Object newObj = getRequest( ).getNewObject( ); if ( newObj instanceof Object[] && ( (Object[]) newObj ).length > 0 ) { newObj = ( (Object[]) newObj )[0]; } DesignElementHandle elementHandle = (DesignElementHandle) newObj; ModuleHandle moduleHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ); // element comes from library and not to itself. if ( elementHandle.getRoot( ) instanceof LibraryHandle && elementHandle.getRoot( ) != moduleHandle ) { LibraryHandle library = (LibraryHandle) elementHandle.getRoot( ); try { if ( UIUtil.includeLibrary( moduleHandle, library ) ) { elementHandle = moduleHandle.getElementFactory( ) .newElementFrom( elementHandle, elementHandle.getName( ) ); moduleHandle.addElement( elementHandle, moduleHandle.getDataSets( ).getSlotID( ) ); } } catch ( Exception e ) { ExceptionHandler.handle( e ); } } Object newHandle = InsertInLayoutUtil.performInsert( elementHandle, getTargetEditPart( ) ); if ( newHandle == null ) return false; setModel( newHandle ); return super.preHandleMouseUp( ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * for cross tab Provides element building support for data set. */ public static class DimensionHandleToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof DimensionHandle ) { // try // { // add extended dataset element. Object newObj = getRequest( ).getNewObject( ); if ( newObj instanceof Object[] && ( (Object[]) newObj ).length > 0 ) { newObj = ( (Object[]) newObj )[0]; } DesignElementHandle elementHandle = (DesignElementHandle) newObj; // ModuleHandle moduleHandle = SessionHandleAdapter.getInstance( // ) // .getReportDesignHandle( ); // // element comes from library and not to itself. // // Object newHandle = InsertInLayoutUtil.performInsert( // elementHandle, // getTargetEditPart( ) ); if ( elementHandle == null ) return false; setModel( elementHandle ); return super.preHandleMouseUp( ); // } // catch ( SemanticException e ) // { // ExceptionHandler.handle( e ); // } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } public static class MeasureHandleToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof MeasureHandle ) { // try // { // add extended dataset element. 
Object newObj = getRequest( ).getNewObject( ); if ( newObj instanceof Object[] && ( (Object[]) newObj ).length > 0 ) { newObj = ( (Object[]) newObj )[0]; } DesignElementHandle elementHandle = (DesignElementHandle) newObj; // ModuleHandle moduleHandle = SessionHandleAdapter.getInstance( // ) // .getReportDesignHandle( ); // // element comes from library and not to itself. // // Object newHandle = InsertInLayoutUtil.performInsert( // elementHandle, // getTargetEditPart( ) ); if ( elementHandle == null ) return false; setModel( elementHandle ); return super.preHandleMouseUp( ); // } // catch ( SemanticException e ) // { // ExceptionHandler.handle( e ); // } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for data set column. */ public static class DataSetColumnToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof DataSetItemModel || getRequest( ).getNewObjectType( ) instanceof ResultSetColumnHandle ) { try { Object newHandle = InsertInLayoutUtil.performInsert( getRequest( ).getNewObject( ), getTargetEditPart( ) ); if ( newHandle == null ) return false; setModel( newHandle ); return super.preHandleMouseUp( ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } /** * Provides element building support for parameter. 
*/ public static class ParameterToolExtends extends AbstractToolHandleExtends { /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. * IToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseUp( ) { if ( getRequest( ).getNewObjectType( ) instanceof ScalarParameterHandle ) { try { Object newObj = getRequest( ).getNewObject( ); if ( newObj instanceof Object[] ) { Object[] newObjs = (Object[]) newObj; Object[] insertedObjs = new Object[newObjs.length]; for ( int i = 0; i < newObjs.length; i++ ) { DesignElementHandle elementHandle = (DesignElementHandle) newObjs[i]; if ( elementHandle.getRoot( ) instanceof LibraryHandle ) { ModuleHandle moduleHandle = SessionHandleAdapter.getInstance( ) .getReportDesignHandle( ); LibraryHandle library = (LibraryHandle) elementHandle.getRoot( ); if ( moduleHandle != library ) { try { if ( UIUtil.includeLibrary( moduleHandle, library ) ) { elementHandle = moduleHandle.getElementFactory( ) .newElementFrom( elementHandle, elementHandle.getName( ) ); moduleHandle.addElement( elementHandle, ModuleHandle.PARAMETER_SLOT ); insertedObjs[i] = elementHandle; continue; } } catch ( Exception e ) { ExceptionHandler.handle( e ); } } } insertedObjs[i] = newObjs[i]; } newObj = insertedObjs; } Object newHandle = InsertInLayoutUtil.performInsert( newObj, getTargetEditPart( ) ); if ( newHandle == null ) return false; setModel( newHandle ); return super.preHandleMouseUp( ); } catch ( SemanticException e ) { ExceptionHandler.handle( e ); } } return false; } /* * (non-Javadoc) * * @seeorg.eclipse.birt.designer.internal.ui.editors.schematic.tools. 
* AbstractToolHandleExtends#preHandleMouseDown() */ public boolean preHandleMouseDown( ) { return false; } } public static AbstractToolHandleExtends getAbstractToolHandleExtendsFromPaletteName( Object str ) { if ( !( str instanceof String ) ) { throw new Error( "Don't support the other type expect String" ); //$NON-NLS-1$ } String template = (String) str; AbstractToolHandleExtends preHandle = null; if ( IReportElementConstants.REPORT_ELEMENT_IMAGE.equalsIgnoreCase( template ) ) { preHandle = new ImageToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_TABLE.equalsIgnoreCase( template ) ) { preHandle = new TableToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_TEXT.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_DATE.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_CREATEDON.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_CREATEDBY.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_FILENAME.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_LASTPRINTED.equalsIgnoreCase( template ) ) { preHandle = new TextToolExtends( ); } else if ( IReportElementConstants.AUTOTEXT_PAGE.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_TOTAL_PAGE_COUNT.equalsIgnoreCase( template ) ) { preHandle = new AutoTextToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_TEXTDATA.equalsIgnoreCase( template ) ) { preHandle = new TextDataToolExtends( ); } else if ( IReportElementConstants.AUTOTEXT_AUTHOR_PAGE_DATE.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_CONFIDENTIAL_PAGE.equalsIgnoreCase( template ) || IReportElementConstants.REPORT_ELEMENT_GRID.equalsIgnoreCase( template ) || IReportElementConstants.AUTOTEXT_PAGEXOFY.equalsIgnoreCase( template ) ) { preHandle = new GridToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_LABEL.equalsIgnoreCase( template ) ) { preHandle = new LabelToolExtends( ); } else if ( 
IReportElementConstants.REPORT_ELEMENT_DATA.equalsIgnoreCase( template ) ) { preHandle = new DataToolExtends( ); } else if ( IReportElementConstants.REPORT_ELEMENT_LIST.equalsIgnoreCase( template ) ) { preHandle = new ListToolExtends( ); } else if ( ( template ).startsWith( IReportElementConstants.REPORT_ELEMENT_EXTENDED ) ) { String extensionName = template.substring( IReportElementConstants.REPORT_ELEMENT_EXTENDED.length( ) ); preHandle = new ExtendedElementToolExtends( extensionName ); } else if ( IReportElementConstants.AUTOTEXT_VARIABLE.equalsIgnoreCase( template ) ) { preHandle = new VariableToolExtends( ); } if ( preHandle == null ) { throw new Error( "Don't find the AbstractToolHandleExtends" ); //$NON-NLS-1$ } return preHandle; } }
- Summary:Fix bug 265411, factor the code. - Bugzilla Bug (s) Resolved:265411 - Description: Allow drag the parameter to the cross tab cell. - Tests Description : Manual test - Files Edited: - Files Added: - Files Deleted: - Notes to Build Team: - Notes to Developers: - Notes to QA: - Notes to Documentation:
UI/org.eclipse.birt.report.designer.ui/src/org/eclipse/birt/report/designer/internal/ui/palette/BasePaletteFactory.java
- Summary:Fix bug 265411, factor the code.
<ide><path>I/org.eclipse.birt.report.designer.ui/src/org/eclipse/birt/report/designer/internal/ui/palette/BasePaletteFactory.java <ide> if ( newObj instanceof Object[] ) <ide> { <ide> Object[] newObjs = (Object[]) newObj; <del> Object[] insertedObjs = new Object[newObjs.length]; <del> for ( int i = 0; i < newObjs.length; i++ ) <del> { <del> DesignElementHandle elementHandle = (DesignElementHandle) newObjs[i]; <del> if ( elementHandle.getRoot( ) instanceof LibraryHandle ) <del> { <del> ModuleHandle moduleHandle = SessionHandleAdapter.getInstance( ) <del> .getReportDesignHandle( ); <del> LibraryHandle library = (LibraryHandle) elementHandle.getRoot( ); <del> if ( moduleHandle != library ) <del> { <del> try <del> { <del> if ( UIUtil.includeLibrary( moduleHandle, <del> library ) ) <del> { <del> elementHandle = moduleHandle.getElementFactory( ) <del> .newElementFrom( elementHandle, <del> elementHandle.getName( ) ); <del> moduleHandle.addElement( elementHandle, <del> ModuleHandle.PARAMETER_SLOT ); <del> insertedObjs[i] = elementHandle; <del> continue; <del> } <del> } <del> catch ( Exception e ) <del> { <del> ExceptionHandler.handle( e ); <del> } <del> } <del> } <del> insertedObjs[i] = newObjs[i]; <del> } <del> newObj = insertedObjs; <add> newObj = UIUtil.getInsertPamaterElements(newObjs); <ide> } <ide> <ide> Object newHandle = InsertInLayoutUtil.performInsert( newObj,
JavaScript
mit
434336c9c5ee80574c1d03d09739605f0f1ca8fa
0
codefirst/keima,codefirst/keima,codefirst/keima
const model = require('./model'); exports.index = function(req, res){ model.App.all(req.getAuthDetails().user.user_id, function(xs){ res.render('app/index', { title : 'Dashboard', apps : xs }); }); }; exports.new = function(req, res){ res.render('app/new',{title: 'Create new application'}); }; exports.create = function(req, res){ model.App.create(req.body.title, req.getAuthDetails().user.user_id, function(error){ if(error){ res.send('app create error:' + error); }else{ res.redirect('/app/'); } }); }; exports.show = function(req, res){ model.App.get(req.params.app, function(app) { res.render("app/show", { title : app.title, app : app }) }); }; exports.edit = function(req, res){ model.App.get(req.params.app, function(app) { res.render("app/edit", { title : 'Edit: ' + app.title, app : app }) }); }; exports.update = function(req, res){ model.App.update(req.params.app, { title : req.body.title }, function(){ res.redirect("/app/" + req.params.app) }); }; exports.destroy = function(req, res){ model.App.remove(req.params.app, function() { res.redirect("/app/") }); }; exports.extras = function(server,listen,name) { server.get('/' + name + '/:app/getting_start',function(req, res) { const protocol = req.headers['x-forwarded-proto'] || req.protocol; const hostname = req.headers.host; model.App.get(req.params.app, function(app) { res.render("app/getting_start", { title : "Getting start", address : protocol + "://" + hostname, app : app }) }); }) }
app.js
const model = require('./model'); exports.index = function(req, res){ model.App.all(req.getAuthDetails().user.user_id, function(xs){ res.render('app/index', { title : 'Dashboard', apps : xs }); }); }; exports.new = function(req, res){ res.render('app/new',{title: 'Create new application'}); }; exports.create = function(req, res){ model.App.create(req.body.title, req.getAuthDetails().user.user_id, function(error){ if(error){ res.send('app create error:' + error); }else{ res.redirect('/app/'); } }); }; exports.show = function(req, res){ model.App.get(req.params.app, function(app) { res.render("app/show", { title : app.title, app : app }) }); }; exports.edit = function(req, res){ model.App.get(req.params.app, function(app) { res.render("app/edit", { title : 'Edit: ' + app.title, app : app }) }); }; exports.update = function(req, res){ model.App.update(req.params.app, { title : req.body.title }, function(){ res.redirect("/app/" + req.params.app) }); }; exports.destroy = function(req, res){ model.App.remove(req.params.app, function() { res.redirect("/app/") }); }; exports.extras = function(server,listen,name) { server.get('/' + name + '/:app/getting_start',function(req, res) { const protocol = req.protocol; const hostname = req.headers.host; model.App.get(req.params.app, function(app) { res.render("app/getting_start", { title : "Getting start", address : protocol + "://" + hostname, app : app }) }); }) }
Fixed: displaying wrong URL
app.js
Fixed: displaying wrong URL
<ide><path>pp.js <ide> <ide> exports.extras = function(server,listen,name) { <ide> server.get('/' + name + '/:app/getting_start',function(req, res) { <del> const protocol = req.protocol; <add> const protocol = req.headers['x-forwarded-proto'] || req.protocol; <ide> const hostname = req.headers.host; <ide> model.App.get(req.params.app, function(app) { <ide> res.render("app/getting_start",
Java
bsd-3-clause
55f4e77640fd21902dba07936def4f76802faa06
0
PX4/jMAVSim,PX4/jMAVSim,PX4/jMAVSim
package me.drton.jmavsim; //import com.sun.j3d.utils.geometry.Box; //import com.sun.j3d.utils.geometry.Cylinder; import com.sun.j3d.utils.geometry.Sphere; import com.sun.j3d.utils.image.ImageException; import com.sun.j3d.utils.image.TextureLoader; import com.sun.j3d.utils.universe.SimpleUniverse; import javax.imageio.ImageIO; import javax.media.j3d.*; import javax.swing.*; import javax.vecmath.*; import java.awt.*; import java.awt.event.*; import java.awt.geom.AffineTransform; import java.awt.geom.Line2D; import java.awt.geom.RoundRectangle2D; import java.awt.image.BufferedImage; //import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; import java.net.URL; import java.util.List; import java.util.ArrayList; import java.util.BitSet; import java.util.Enumeration; /** * 3D Visualizer, works in own thread, synchronized with "world" thread. */ public class Visualizer3D extends JFrame { public static enum ViewTypes { VIEW_STATIC, VIEW_FPV, VIEW_GIMBAL } public static enum ZoomModes { ZOOM_NONE, ZOOM_DYNAMIC, ZOOM_FIXED } public static final double PI_2 = Math.PI / 2d; public static final String TEX_DIR = "environment/"; // folder for all environment textures public static final String SKY_TEXTURE = "HDR_040_Field_Bg.jpg"; //public static final String SKY_TEXTURE = "HDR_111_Parking_Lot_2_Bg.jpg"; // the following has a lower resolution and reduces memory usage //public static final String SKY_TEXTURE = "earth3.jpg"; public static final String GND_TEXTURE = "grass3.jpg"; //public static final String GND_TEXTURE = "ground.jpg"; public static final String COMPASS_IMG = "compass_rose.png"; // for overlay HUD public static final Dimension WINDOW_SIZE = new Dimension(1024, 768); // default application window size public static final float WORLD_SIZE = 5000.0f; // [m] size of world sphere public static final boolean AA_ENABLED = true; // default antialising for 3D scene public static final ViewTypes VIEW_TYPE = ViewTypes.VIEW_STATIC; // 
default view type public static final ZoomModes ZOOM_MODE = ZoomModes.ZOOM_DYNAMIC; // default zoom type public static final int FPS_TARGET = 60; // target frames per second private Dimension reportPanelSize = new Dimension(Math.min(WINDOW_SIZE.width / 2, 350), 200); private boolean reportPaused = false; private int overlaySize = 260; // width & height of compass overlay window private boolean showOverlay = true; private double defaultFOV = Math.PI / 3; // field of view private float defaultDZDistance = 25.0f; // [m] distance to object at which dynamic zoom is activated private float manZoomStep = 0.1f; // manual zoom steps as fraction of current zoom level private Vector3d viewerGroundOffset = new Vector3d(-5.0, 0.0, -1.7); // origin of ground-based fixed view private final World world; private double currentFOV = defaultFOV; private float dynZoomDistance = defaultDZDistance; private ViewTypes viewType; private ZoomModes zoomMode; private Vector3d viewerPosition = new Vector3d(); private Vector3d viewerPositionOffset = new Vector3d(); private Transform3D viewerTransform = new Transform3D(); private SimpleUniverse universe; private View view; private Canvas3D canvas; private BoundingSphere sceneBounds; private TransformGroup viewerTransformGroup; private KinematicObject viewerTargetObject; private KinematicObject viewerPositionObject; private KinematicObject vehicleViewObject; private KinematicObject gimbalViewObject; private MAVLinkHILSystem hilSystem; private JSplitPane splitPane; private ReportPanel reportPanel; private KeyboardHandler keyHandler; private OutputStream outputStream; // for receiving system output messages private MessageOutputStream msgOutputStream; // for logging messages private Matrix3d tmp_m3d1 = new Matrix3d(); // for calculations private Matrix3d tmp_m3d2 = new Matrix3d(); private Vector3d tmp_v3d = new Vector3d(); private BranchGroup tmp_bGrp; private static final long serialVersionUID = 1L; public Visualizer3D(World world) { this.world = 
world; keyHandler = new KeyboardHandler(); msgOutputStream = new MessageOutputStream(); outputStream = msgOutputStream; // outputStream = new BufferedOutputStream(msgOutputStream); Dimension size = WINDOW_SIZE; Rectangle sizeBounds = GraphicsEnvironment.getLocalGraphicsEnvironment().getMaximumWindowBounds(); if (size.width > sizeBounds.width) size.width = sizeBounds.width; if (size.height > sizeBounds.height) size.height = sizeBounds.height; setSize(size); setDefaultCloseOperation(EXIT_ON_CLOSE); setTitle("jMAVSim"); splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT); splitPane.setOneTouchExpandable(false); splitPane.setContinuousLayout(true); splitPane.setFocusable(false); getContentPane().add(splitPane); reportPanel = new ReportPanel(); reportPanel.setFocusable(false); reportPanel.setMinimumSize(new Dimension(50, 0)); reportPanel.setPreferredSize(reportPanelSize); splitPane.setLeftComponent(reportPanel); // 3D graphics canvas GraphicsConfiguration gc = SimpleUniverse.getPreferredConfiguration(); if (showOverlay) canvas = new CustomCanvas3D(gc, size, overlaySize); else canvas = new Canvas3D(gc); canvas.setFocusable(false); canvas.addKeyListener(keyHandler); canvas.setMinimumSize(new Dimension(250, 250)); canvas.setPreferredSize(new Dimension(250, 250)); splitPane.setRightComponent(canvas); universe = new SimpleUniverse(canvas); view = universe.getViewer().getView(); view.setMinimumFrameCycleTime(1000 / FPS_TARGET); view.setBackClipDistance(WORLD_SIZE / 4); view.setSceneAntialiasingEnable(AA_ENABLED); view.setTransparencySortingPolicy(View.TRANSPARENCY_SORT_GEOMETRY); view.setFieldOfView(defaultFOV); viewerTransformGroup = universe.getViewingPlatform().getViewPlatformTransform(); createEnvironment(); setViewType(VIEW_TYPE); setZoomMode(ZOOM_MODE); setVisible(true); splitPane.resetToPreferredSizes(); toggleReportPanel(false); resetView(); canvas.requestFocus(); } public void addWorldModels() { // add any models in World for (WorldObject object : 
world.getObjects()) { if (object instanceof KinematicObject) { BranchGroup bg = ((KinematicObject) object).getBranchGroup(); if (bg != null) { bg.compile(); universe.addBranchGraph(bg); } } } } private void createEnvironment() { BranchGroup group = new BranchGroup(); sceneBounds = new BoundingSphere(new Point3d(0.0, 0.0, 0.0), WORLD_SIZE); float grndLevel = (float)world.getEnvironment().getGroundLevel(); double ground_offset = grndLevel + 0.005; Texture2D tex; Transform3D trans; TransformGroup tg; Matrix3d rot = new Matrix3d(); rot.rotX(PI_2); // Sky Sphere skySphere = new Sphere(1.0f, Sphere.GENERATE_NORMALS_INWARD | Sphere.GENERATE_TEXTURE_COORDS, 36); tex = loadTexture(TEX_DIR + SKY_TEXTURE); skySphere.getAppearance().setTexture(tex); trans = new Transform3D(); Matrix3d rotSky = new Matrix3d(); rotSky.rotZ(-120d*Math.PI/180d); rotSky.mul(rot); trans.setRotation(rotSky); tg = new TransformGroup(trans); tg.addChild(skySphere); // Background (sky) Background bg = new Background(); bg.setApplicationBounds(sceneBounds); bg.setColor(0, 0, 0.639f); // dark blue BranchGroup backGeoBranch = new BranchGroup(); backGeoBranch.addChild(tg); bg.setGeometry(backGeoBranch); group.addChild(bg); // Ground group.addChild(createFlatFloor(ground_offset)); // group.addChild(createMultiFloor(ground_offset)); // // cylinder-as-floor attempt, but isn't blending right with transparent overlay // trans = new Transform3D(); // trans.setRotation(rot); // trans.transform(new Vector3d(0.0, 0.0, ground_offset)); // tg = new TransformGroup(trans); // tg.addChild(createFlatFloor(ground_offset)); // group.addChild(tg); // Compass rose on ground // CompassRose rose = new CompassRose(world, 25.0f); // rose.setPositionOffset(new Vector3d(viewerGroundOffset.x, viewerGroundOffset.y, ground_offset - 0.05)); // group.addChild(rose.getBranchGroup()); // Light DirectionalLight light1 = new DirectionalLight(new Color3f(1.0f, 1.0f, 1.0f), new Vector3f(4.0f, 7.0f, 12.0f)); 
light1.setInfluencingBounds(sceneBounds); group.addChild(light1); AmbientLight light2 = new AmbientLight(new Color3f(0.9f, 0.9f, 0.9f)); light2.setInfluencingBounds(sceneBounds); group.addChild(light2); // Update behavior Behavior b = new UpdateBehavior(); b.setSchedulingBounds(sceneBounds); group.addChild(b); group.compile(); universe.addBranchGraph(group); } private Shape3D createFlatFloor(double height) { Appearance ap = new Appearance(); double side = WORLD_SIZE * 2.0; double dZ = height; Texture2D tex; float tiles = 1.0f; tex = loadTexture(TEX_DIR + GND_TEXTURE); if (tex != null) tiles = (int)(WORLD_SIZE / tex.getWidth()) * 800; QuadArray plane = new QuadArray(4, GeometryArray.COORDINATES | GeometryArray.TEXTURE_COORDINATE_2 ); plane.setCoordinate(0, new Point3d(-side, side, dZ)); plane.setCoordinate(1, new Point3d(side, side, dZ)); plane.setCoordinate(2, new Point3d(side, -side, dZ)); plane.setCoordinate(3, new Point3d(-side, -side, dZ)); plane.setTextureCoordinate(0, 0, new TexCoord2f(0.0f, 0.0f)); plane.setTextureCoordinate(0, 1, new TexCoord2f(tiles, 0.0f)); plane.setTextureCoordinate(0, 2, new TexCoord2f(tiles, tiles)); plane.setTextureCoordinate(0, 3, new TexCoord2f(0.0f, tiles)); // for cylinder // tex.setBoundaryModeT(Texture.WRAP); // tex.setBoundaryModeS(Texture.WRAP); // Transform3D trans = new Transform3D(); // trans.setScale(worldExtent / tex.getWidth()); // TextureAttributes texat = new TextureAttributes(); // texat.setTextureTransform(trans); // texat.setTextureMode(TextureAttributes.REPLACE); // texat.setPerspectiveCorrectionMode(TextureAttributes.NICEST); // ap.setTextureAttributes(texat); ap.setTexture(tex); //return new Cylinder(worldExtent, 0.001f, Cylinder.GENERATE_TEXTURE_COORDS | Cylinder.GENERATE_NORMALS, 8, 4, ap); return new Shape3D(plane, ap); } /* private float[][] heights; // height map for the floor // the floor is a multi-textured mesh, with splashes of extra textures private OrderedGroup createMultiFloor() { MultiFloor floor = 
new MultiFloor(TEX_DIR + "grass.gif", 4, TEX_DIR + "stoneBits.gif", 2); // the ground detail textures are grass and bits of stone // the frequencies (4, 2) should divide into the floor length // (FLOOR_LEN (20) in MultiFloor) with no remainder heights = floor.getHeightMap(); // Start building an ordered group of floor meshes. Ordering avoids rendering conflicts between the meshes. OrderedGroup floorOG = new OrderedGroup(); floorOG.addChild(floor); // load the textures for the splashes Texture2D flowersTex = loadTexture(TEX_DIR + "flowers.jpg"); Texture2D waterTex = loadTexture(TEX_DIR + "water.jpg"); // add splashes for(int i=0; i < 8; i++) floorOG.addChild( new SplashShape(flowersTex, heights) ); for (int i=0; i < 3; i++) floorOG.addChild( new SplashShape(waterTex, heights) ); // return all the meshes return floorOG; } */ // load image from file as a texture private Texture2D loadTexture(String fn) { System.gc(); // cleanup memory before loading the texture TextureLoader texLoader = null; Texture2D texture = new Texture2D(); texture.setEnable(false); try { texLoader = new TextureLoader(fn, null); // enable Mipmapping (increases memory usage considerably) //texLoader = new TextureLoader(fn, TextureLoader.GENERATE_MIPMAP, null); } catch (ImageException e) { System.out.println("Error, could not load texture: " + fn); System.out.println("Error message:" + e.getLocalizedMessage()); } if (texLoader != null) { texture = (Texture2D) texLoader.getTexture(); if (texture == null) System.out.println("Cannot load texture from " + fn); else { //System.out.println( "\t\tNumber Of MIPMAPS->" + texture.numMipMapLevels() ); texture.setMinFilter(Texture.NICEST); texture.setMagFilter(Texture.NICEST); texture.setAnisotropicFilterMode(texture.ANISOTROPIC_SINGLE_VALUE); texture.setAnisotropicFilterDegree(4.f); //System.out.println("Loaded texture from " + fn); texture.setEnable(true); } } return texture; } /** * Target object to point camera, has effect only if viewerPositionObject is 
not set. * * @param object */ public void setViewerTargetObject(KinematicObject object) { this.viewerTargetObject = object; } /** * Object to place camera on, if nullptr then camera will be placed in fixed point set by setViewerPosition(). * * @param object */ public void setViewerPositionObject(KinematicObject object) { this.viewerPositionObject = object; if (object != null && zoomMode == ZoomModes.ZOOM_DYNAMIC) nextZoomMode(); } /** * Fixed camera position, has effect only if viewerPositionObject not set. * * @param position */ public void setViewerPosition(Vector3d position) { this.viewerPositionObject = null; this.viewerPosition = position; viewerTransform.setTranslation(viewerPosition); } /** * Camera position offset from object position when viewer placed on some object * * @param offset position offset */ public void setViewerPositionOffset(Vector3d offset) { this.viewerPositionOffset = offset; } /** * Set the "vehicle" object to use for switching views. * * @param object */ public void setVehicleViewObject(KinematicObject object) { this.vehicleViewObject = object; // if (rose != null) // rose.setBaseObject(object); } /** * Set the "gimbal" object to use for switching views. * * @param object */ public void setGimbalViewObject(KinematicObject object) { this.gimbalViewObject = object; } /** * Set the system being controlled. * * @param system */ public void setHilSystem(MAVLinkHILSystem system) { this.hilSystem = system; } /** * Sets the text of the simulation report. * * @param text */ public void setReportText(String text) { if (showReportText()) reportPanel.setText(text); } /** * Check whether to show the report text */ public boolean showReportText() { return reportPanel.isShowing() && !reportPaused; } /** * Show/hide the simulation report. 
* * @param text */ public void toggleReportPanel(boolean on) { if (reportPanel == null || (on && reportPanel.isShowing()) || (!on && !reportPanel.isShowing())) return; setReportPaused(!on); if (reportPanel.isShowing()) { reportPanelSize = reportPanel.getSize(); splitPane.setLeftComponent(null); splitPane.setDividerSize(0); } else { reportPanel.setPreferredSize(reportPanelSize); splitPane.setLeftComponent(reportPanel); splitPane.setDividerSize((int)UIManager.get("SplitPane.dividerSize")); } splitPane.resetToPreferredSizes(); revalidate(); } public void toggleReportPanel() { this.toggleReportPanel(!reportPanel.isShowing()); } /** * Toggles updates of the report panel text. * * @param pause */ public void setReportPaused(boolean pause) { reportPaused = pause; reportPanel.setIsFocusable(pause); if (pause) ReportUpdater.setUpdateFreq(0L); else ReportUpdater.resetUpdateFreq(); } public void setShowOverlay(boolean showOverlay) { this.showOverlay = showOverlay; } /** * Toggles scene renderer antialiasing on/off */ public void setAAEnabled(boolean enable) { view.setSceneAntialiasingEnable(enable); if (showOverlay) ((CustomCanvas3D)canvas).setAA(enable); } public OutputStream getOutputStream() { return outputStream; } public void setZoomMode(ZoomModes zoomMode) { if (zoomMode == ZoomModes.ZOOM_DYNAMIC && viewType != ViewTypes.VIEW_STATIC) nextZoomMode(); else this.zoomMode = zoomMode; } public void setDynZoomDistance(float dynZoomDistance) { if (dynZoomDistance < 0.5f) dynZoomDistance = 0.5f; else { double dist = getVectorToTargetObject(viewerPosition, viewerTargetObject).length(); if (dynZoomDistance > dist + defaultDZDistance) dynZoomDistance = (float)dist + defaultDZDistance; } this.dynZoomDistance = dynZoomDistance; } public void setFieldOfView(double fov) { fov = Math.max(Math.min(fov, 2.7), 0.001); view.setFieldOfView(fov); currentFOV = fov; } public void setViewType(ViewTypes v) { switch (v) { case VIEW_STATIC : // Put camera on static point and point to vehicle if 
(this.viewType != ViewTypes.VIEW_STATIC && vehicleViewObject != null) { this.viewType = ViewTypes.VIEW_STATIC; Vector3d pos = new Vector3d(viewerGroundOffset); pos.z = (pos.z + world.getEnvironment().getGroundLevel()); this.setViewerPosition(pos); this.setViewerTargetObject(vehicleViewObject); } break; case VIEW_FPV : // Put camera on vehicle (FPV) if (this.viewType != ViewTypes.VIEW_FPV && vehicleViewObject != null) { this.viewType = ViewTypes.VIEW_FPV; this.setViewerPositionObject(vehicleViewObject); this.setViewerPositionOffset(new Vector3d(-0.0f, 0.0f, -0.3f)); // Offset from vehicle center } break; case VIEW_GIMBAL : if (this.viewType != ViewTypes.VIEW_GIMBAL && gimbalViewObject != null) { this.viewType = ViewTypes.VIEW_GIMBAL; this.setViewerPositionObject(gimbalViewObject); this.setViewerPositionOffset(new Vector3d(0.0f, 0.0f, 0.0f)); } else System.out.println("Unable to set view, gimbal not mounted."); break; } } private void nextZoomMode() { if (zoomMode == ZoomModes.ZOOM_NONE && viewType == ViewTypes.VIEW_STATIC) { zoomMode = ZoomModes.ZOOM_DYNAMIC; } else if (zoomMode == ZoomModes.ZOOM_FIXED) { zoomMode = ZoomModes.ZOOM_NONE; view.setFieldOfView(defaultFOV); } else { zoomMode = ZoomModes.ZOOM_FIXED; view.setFieldOfView(currentFOV); } } public void resetView() { tmp_m3d1.rotZ(Math.PI); tmp_m3d2.rotY(PI_2); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotZ(-PI_2); tmp_m3d1.mul(tmp_m3d2); viewerTransform.setRotation(tmp_m3d1); } public Vector3d getVectorToTargetObject(Vector3d from, KinematicObject objTo) { Vector3d ret = new Vector3d(); ret.sub(objTo.getPosition(), from); return ret; } private void updateVisualizer() { double dist; synchronized (world) { // Synchronize with "world" thread try { // Update branch groups of all kinematic objects for (WorldObject object : world.getObjects()) { if (object instanceof KinematicObject) { tmp_bGrp = ((KinematicObject) object).getBranchGroup(); if (tmp_bGrp != null) { ((KinematicObject) object).updateBranchGroup(); } } } // 
Update view platform if (viewerPositionObject != null) { // Camera on object viewerPosition.set(viewerPositionOffset); viewerPositionObject.getRotation().transform(viewerPosition); viewerPosition.add(viewerPositionObject.getPosition()); viewerTransform.setTranslation(viewerPosition); tmp_m3d1.set(viewerPositionObject.getRotation()); tmp_m3d2.rotZ(PI_2); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotX(-PI_2); tmp_m3d1.mul(tmp_m3d2); viewerTransform.setRotation(tmp_m3d1); } else if (viewerTargetObject != null) { // Fixed-position camera, point camera to target tmp_v3d = viewerTargetObject.getPosition(); dist = getVectorToTargetObject(viewerPosition, viewerTargetObject).length(); tmp_m3d1.rotZ(Math.PI); tmp_m3d2.rotY(PI_2); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotZ(-PI_2); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotY(-Math.atan2(tmp_v3d.y - viewerPosition.y, tmp_v3d.x - viewerPosition.x)); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotX(-Math.asin((tmp_v3d.z - viewerPosition.z) / dist)); tmp_m3d1.mul(tmp_m3d2); viewerTransform.setRotation(tmp_m3d1); if (zoomMode == ZoomModes.ZOOM_DYNAMIC) { if (dist > dynZoomDistance) view.setFieldOfView(dynZoomDistance / dist * currentFOV); else view.setFieldOfView(currentFOV); } } viewerTransformGroup.setTransform(viewerTransform); } catch (BadTransformException e) { e.printStackTrace(); } } } /* * Reset Rotation, Acceleration, Velocity */ private void resetObjectRAV(KinematicObject obj, boolean resetPos) { if (obj == null) return; Vector3f oldpos = new Vector3f(obj.getPosition()); obj.resetObjectParameters(); if (!resetPos) moveObject(obj, oldpos, true); else obj.setIgnoreGravity(false); } /* * Rotate object in steps */ private void rotateObject(KinematicObject obj, Vector3f vec, float deg) { if (obj == null) return; Matrix3d rot = obj.getRotation(); Matrix3d r = new Matrix3d(); if (vec == null) { r.rotZ(0.0); } else { AxisAngle4f aa = new AxisAngle4f(vec, (float)Math.toRadians(deg)); r.set(aa); } rot.mulNormalize(r); } /* * Set a continuous rotation rate of an 
object */ private void spinRateObject(KinematicObject obj, Vector3f vec) { if (obj == null) return; if (vec == null) obj.setRotationRate(new Vector3d()); else { // if still on ground, move it up so it can rotate if (obj.getPosition().z >= 0) moveObject(obj, new Vector3f(0f, 0f, -2.0f), false); obj.getRotationRate().add(new Vector3d(vec)); } } /* * Change position of an object */ private void moveObject(KinematicObject obj, Vector3f vec, boolean absolute) { if (obj == null) return; Vector3d pos = obj.getPosition(); if (absolute) pos.set(vec); else pos.add(new Vector3d(vec)); obj.setIgnoreGravity(pos.z < 0.0); // if (pos.z >= 0.0) // //world.getEnvironment().setG(null); // else // world.getEnvironment().setG(new Vector3d()); } /* * Manipulate wind in environment */ private void windDirection(Vector3f vec, boolean setBase, boolean setCurrent, boolean setDeviation) { if (vec == null) { if (setBase) world.getEnvironment().setWind(new Vector3d()); if (setCurrent) { world.getEnvironment().setCurrentWind(new Vector3d()); System.out.println("Wind reset to zero."); } if (setDeviation) { world.getEnvironment().setWindDeviation(new Vector3d()); System.out.println("Wind deviation reset to zero."); } } else { Vector3d adj = new Vector3d(vec); if (setBase) world.getEnvironment().getWind().add(adj); if (setCurrent) { world.getEnvironment().getCurrentWind(null).add(adj); System.out.println("Wind vector is now " + ReportUtil.vector2str(world.getEnvironment().getCurrentWind(viewerPosition))); } if (setDeviation) { world.getEnvironment().world.getEnvironment().getWindDeviation().add(adj); System.out.println("Wind deviation is now " + ReportUtil.vector2str(world.getEnvironment().getWindDeviation())); } } } // //// private Classes // /* * Custom Canvas class for drawing optional overlay HUD */ private class CustomCanvas3D extends Canvas3D { private static final long serialVersionUID = 7144426579917281131L; private int[] overlayMargins = {10, 10}; // x, y from left/right bottom corner 
private Font font = new Font("SansSerif", Font.BOLD, 14); private Color txtColor = Color.white; private Color hdgColor = Color.magenta; private Color crsColor = Color.green; private Color windColor = Color.blue; // system messages overlay private Font msgFont = new Font("SansSerif", Font.PLAIN, 14); private Color msgColor = new Color(255, 255, 255, 240); private Color msgBgColor = new Color(202, 162, 0, 60); private BufferedImage compassOverlay; private J3DGraphics2D g2d; private Matrix3d m1 = new Matrix3d(); private AffineTransform affTrans = new AffineTransform(); private BufferedImage drawImg; private Graphics2D drawg2d; private Line2D.Float hdgLine; private Line2D.Float crsLine; private Line2D.Float windLine; private BasicStroke crsStroke = new BasicStroke(2.5f); // drawn last, on top private BasicStroke hdgStroke = new BasicStroke(4.0f); private BasicStroke wndStroke = new BasicStroke(5.5f); // drawn first, on bottom private RoundRectangle2D.Float msgBg = new RoundRectangle2D.Float(); private int[] overlaySize = new int[2]; // x, y private int[] messagesSize = {450, 600}; // x, y private int msgLineHeight = 15; private int halfW; private int fps = 1; private int framesCount = 0; private long frameTime = 0L; public CustomCanvas3D(GraphicsConfiguration gc, Dimension windowSize, int overlayWidth) { super(gc); g2d = this.getGraphics2D(); setAA(AA_ENABLED); // constrain overlay sizes if (overlayWidth > windowSize.getWidth() / 2) overlayWidth = (int) (windowSize.getWidth() / 2); if (overlayWidth + 45 > windowSize.getHeight() / 2) overlayWidth = (int) (windowSize.getHeight() / 2); if (messagesSize[0] > windowSize.getWidth() / 2) messagesSize[0] = (int) (windowSize.getWidth() / 2); if (messagesSize[1] > windowSize.getHeight() * 0.75) messagesSize[1] = (int) (windowSize.getHeight() * 0.75); overlaySize[0] = overlayWidth; overlaySize[1] = overlayWidth + 45; halfW = overlayWidth / 2; frameTime = System.nanoTime(); // drawing surface for vector lines drawImg = new 
BufferedImage(overlayWidth, overlayWidth, BufferedImage.TYPE_4BYTE_ABGR); drawg2d = drawImg.createGraphics(); // load and scale compass image for overlay URL file = null; compassOverlay = new BufferedImage(overlayWidth, overlayWidth, BufferedImage.TYPE_4BYTE_ABGR); try { file = new URL("file:./" + TEX_DIR + COMPASS_IMG); if (file != null) { Image img = ImageIO.read(file); img = img.getScaledInstance(overlayWidth, overlayWidth, Image.SCALE_SMOOTH); compassOverlay.createGraphics().drawImage(img, 0, 0, null); } } catch (IOException e) { System.out.println("Error, could not load image: " + TEX_DIR + COMPASS_IMG); System.out.println("Error message:" + e.getLocalizedMessage()); } // set up vector lines for HUD hdgLine = new Line2D.Float(0, 0, 0, halfW * -0.85f); crsLine = new Line2D.Float(0, 0, 0, halfW * -0.425f); windLine = new Line2D.Float(0, 0, 0, halfW * -0.425f); } public void setAA(boolean on) { if (on) { g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); g2d.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY); g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC); } else { g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED); g2d.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_SPEED); g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR); } } // we draw the HUD/overlay here public void postRender() { if (!showOverlay) return; int x = overlayMargins[0]; int y = this.getHeight() - overlaySize[1] - overlayMargins[1]; double z, dZ, norm; Vector3d vect; clearDrawing(); // compass rotation in relation to viewer 
viewerTransform.get(m1); dZ = -Math.atan2(m1.getElement(1, 0), m1.getElement(0, 0)) + Math.toRadians(90.0); affTrans.setToRotation(dZ, halfW, halfW); drawg2d.setTransform(affTrans); drawg2d.drawImage(compassOverlay, 0, 0, this); // wind line in relation to viewer vect = world.getEnvironment().getCurrentWind(viewerPosition); norm = Math.sqrt(vect.x * vect.x + vect.y * vect.y); affTrans.setToTranslation(halfW, halfW); affTrans.rotate(vect.x, vect.y); affTrans.rotate(dZ); // scale length and width based on wind speed affTrans.scale(Math.max(Math.min(Math.abs(vect.z) * 0.5, 10.0), 1.0), Math.min(norm * 0.2, halfW * 0.85)); drawg2d.setTransform(affTrans); drawg2d.setColor(windColor); drawg2d.setStroke(wndStroke); drawg2d.draw(windLine); if (vehicleViewObject != null) { // heading line m1 = (Matrix3d) vehicleViewObject.getRotation().clone(); z = Math.atan2(m1.getElement(1, 0), m1.getElement(0, 0)); affTrans.setToTranslation(halfW, halfW); affTrans.rotate(z + dZ); drawg2d.setTransform(affTrans); drawg2d.setColor(hdgColor); drawg2d.setStroke(hdgStroke); drawg2d.draw(hdgLine); // course over ground line vect = vehicleViewObject.getVelocity(); z = Math.atan2(vect.y, vect.x); norm = Math.sqrt(vect.x * vect.x + vect.y * vect.y); affTrans.setToTranslation(halfW, halfW); affTrans.rotate(z + dZ); // scale length and width based on vehicle speed affTrans.scale(Math.max(Math.min(Math.abs(vect.z) * 0.5, 10.0), 1.0), Math.min(norm * 0.2, halfW * 0.85)); drawg2d.setTransform(affTrans); drawg2d.setColor(crsColor); drawg2d.setStroke(crsStroke); drawg2d.draw(crsLine); } // now draw the composed compass + vectors image on the main J3DGraphics2D g2d.drawImage(drawImg, x, y, this); // draw all HUD text items g2d.setFont(font); g2d.setColor(txtColor); y += drawImg.getHeight() + 25; String zmode = zoomMode == ZoomModes.ZOOM_NONE ? "Fixed" : zoomMode == ZoomModes.ZOOM_DYNAMIC ? 
"Dynamic" : "Manual"; if (zoomMode == ZoomModes.ZOOM_DYNAMIC) zmode += String.format(" @ %.2fm", dynZoomDistance); zmode += String.format(" FOV: %.2f\u00b0", Math.toDegrees(view.getFieldOfView())); g2d.drawString("Zoom mode: " + zmode, x, y); y += 20; g2d.drawString(String.format("FPS: %3d", fps), x, y); x += 70; g2d.setColor(hdgColor); g2d.drawString("HDG", x, y); x += 40; g2d.setColor(crsColor); g2d.drawString("CRS", x, y); x += 40; g2d.setColor(windColor); g2d.drawString("WND", x, y); // messages on the bottom right if (msgOutputStream.getListLen() > 0) { x = this.getWidth() - messagesSize[0] - overlayMargins[0]; int h = Math.min(messagesSize[1], msgOutputStream.getListLen() * msgLineHeight + 5); y = this.getHeight() - h - overlayMargins[1]; msgBg.setRoundRect(x, y, messagesSize[0], h, 15, 15); g2d.setColor(msgBgColor); g2d.draw(msgBg); g2d.fill(msgBg); x += 10; y += msgLineHeight; g2d.setFont(msgFont); g2d.setColor(msgColor); for (String msg : msgOutputStream.getStrings()) { g2d.drawString(msg, x, y); y += msgLineHeight; if (y > this.getHeight()) break; } } g2d.flush(false); ++framesCount; if (System.nanoTime() - frameTime >= (long)1e9) { fps = framesCount; framesCount = 0; frameTime = System.nanoTime(); } } private void clearDrawing() { // clear drawing image affTrans.setToIdentity(); drawg2d.setTransform(affTrans); drawg2d.setComposite(AlphaComposite.getInstance(AlphaComposite.CLEAR, 0.0f)); drawg2d.fillRect(0, 0, overlaySize[0], overlaySize[0]); drawg2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 1.0f)); drawg2d.setColor(Color.BLACK); } } /* * KeyboardHandler */ public class KeyboardHandler extends KeyAdapter { public BitSet keyBits = new BitSet(256); @Override public void keyReleased(KeyEvent e) { keyBits.clear(e.getKeyCode()); checkCumulativeKeys(); switch (e.getKeyCode()) { // View swtich keys case KeyEvent.VK_F : setViewType(ViewTypes.VIEW_FPV); break; case KeyEvent.VK_S : setViewType(ViewTypes.VIEW_STATIC); break; case KeyEvent.VK_G 
: setViewType(ViewTypes.VIEW_GIMBAL); break; // reporting panel case KeyEvent.VK_R : toggleReportPanel(); break; // pause/start report updates case KeyEvent.VK_T : setReportPaused(!reportPaused); break; // toggle zoom mode fixed/dynamic/manual case KeyEvent.VK_Z : nextZoomMode(); break; // zoom reset case KeyEvent.VK_0 : case KeyEvent.VK_ENTER : zoomMode = ZoomModes.ZOOM_NONE; setFieldOfView(defaultFOV); setDynZoomDistance(defaultDZDistance); break; // init sim mode case KeyEvent.VK_I : if (hilSystem != null) hilSystem.initMavLink(); break; // quit sim mode case KeyEvent.VK_Q : if (hilSystem != null) hilSystem.endSim(); break; // toggle HUD overlay case KeyEvent.VK_H : setShowOverlay(!showOverlay); break; // clear messages from HUD case KeyEvent.VK_C : msgOutputStream.clearMessages(); break; // show help text case KeyEvent.VK_F1 : msgOutputStream.clearMessages(); msgOutputStream.setNumOfMessages(50); Simulator.printKeyCommands(); msgOutputStream.resetNumOfMessages(); break; // exit app case KeyEvent.VK_ESCAPE : dispatchEvent(new WindowEvent(getWindows()[0], WindowEvent.WINDOW_CLOSING)); break; // full view and object reset case KeyEvent.VK_SPACE : resetObjectRAV(vehicleViewObject, true); resetView(); break; // vehicle object resets case KeyEvent.VK_NUMPAD5 : // reset wind if (keyBits.get(KeyEvent.VK_ALT)) windDirection(null, true, true, true); // reset vehicle rotation, etc else if (keyBits.get(KeyEvent.VK_CONTROL)) resetObjectRAV(vehicleViewObject, false); // reset only rotation rate else spinRateObject(vehicleViewObject, null); break; } } @Override public void keyPressed(KeyEvent e) { keyBits.set(e.getKeyCode()); checkCumulativeKeys(); switch (e.getKeyCode()) { // zoom in case KeyEvent.VK_PLUS : case KeyEvent.VK_ADD : case KeyEvent.VK_EQUALS : if (zoomMode == ZoomModes.ZOOM_DYNAMIC) setDynZoomDistance(dynZoomDistance * (1.0f - manZoomStep)); else { zoomMode = ZoomModes.ZOOM_FIXED; setFieldOfView(currentFOV * (1.0f - manZoomStep)); } break; // zoom out case 
KeyEvent.VK_MINUS : case KeyEvent.VK_SUBTRACT : if (zoomMode == ZoomModes.ZOOM_DYNAMIC) setDynZoomDistance(dynZoomDistance * (1.0f + manZoomStep)); else { zoomMode = ZoomModes.ZOOM_FIXED; setFieldOfView(currentFOV * (1.0f + manZoomStep)); } break; } } public void checkCumulativeKeys() { // how to move Vector3f dir = new Vector3f(); // how much to move float deg = keyBits.get(KeyEvent.VK_CONTROL) ? 5.0f : 1.0f; // magnitude of move (rotation magnitude is always 1) float m = keyBits.get(KeyEvent.VK_SHIFT) ? deg / 5.0f : 1.0f; if (keyBits.get(KeyEvent.VK_LEFT) || keyBits.get(KeyEvent.VK_KP_LEFT)) dir.x = (-m); if (keyBits.get(KeyEvent.VK_RIGHT) || keyBits.get(KeyEvent.VK_KP_RIGHT)) dir.x = (m); if (keyBits.get(KeyEvent.VK_UP) || keyBits.get(KeyEvent.VK_KP_UP)) dir.y = (-m); if (keyBits.get(KeyEvent.VK_DOWN) || keyBits.get(KeyEvent.VK_KP_DOWN)) dir.y = (m); if (keyBits.get(KeyEvent.VK_END) || keyBits.get(KeyEvent.VK_INSERT)) dir.z = (-m); if (keyBits.get(KeyEvent.VK_PAGE_DOWN) || keyBits.get(KeyEvent.VK_DELETE)) dir.z = (m); if (dir.length() != 0.0) { if (keyHandler.keyBits.get(KeyEvent.VK_ALT)) { // wind deviation dir.set(-dir.y, dir.x, dir.z); windDirection(dir, false, false, true); } else if (keyBits.get(KeyEvent.VK_SHIFT)) { // move vehicle dir.set(-dir.y, dir.x, dir.z); moveObject(vehicleViewObject, dir, false); } else // rotate vehicle rotateObject(vehicleViewObject, dir, deg); } // check for keypad events (rotation rate or wind force) if (keyBits.get(KeyEvent.VK_NUMPAD5)) return; dir = new Vector3f(); m = keyHandler.keyBits.get(KeyEvent.VK_CONTROL) ? 
1.0f : 0.5f; if (keyBits.get(KeyEvent.VK_NUMPAD4)) dir.x = (-m); if (keyBits.get(KeyEvent.VK_NUMPAD6)) dir.x = (m); if (keyBits.get(KeyEvent.VK_NUMPAD8)) dir.y = (-m); if (keyBits.get(KeyEvent.VK_NUMPAD2)) dir.y = (m); if (keyBits.get(KeyEvent.VK_NUMPAD1)) dir.z = (-m); if (keyBits.get(KeyEvent.VK_NUMPAD3) || keyBits.get(KeyEvent.VK_NUMPAD7)) dir.z = (m); if (dir.length() != 0.0) { if (keyHandler.keyBits.get(KeyEvent.VK_ALT)) { // wind strength but not deviation dir.set(-dir.y, dir.x, -dir.z); windDirection(dir, true, true, false); } else // adjust vehicle spin rate spinRateObject(vehicleViewObject, dir); } } } // end KeyboardHandler /* * Thread updater */ class UpdateBehavior extends Behavior { private WakeupCondition condition = new WakeupOnElapsedFrames(0, false); @Override public void initialize() { wakeupOn(condition); } @Override @SuppressWarnings("rawtypes") public void processStimulus(Enumeration wakeup) { Object w; while (wakeup.hasMoreElements()) { w = wakeup.nextElement(); if (w instanceof WakeupOnElapsedFrames) { updateVisualizer(); } wakeupOn(condition); } } } /* * System message logger */ class MessageOutputStream extends OutputStream { private final int strcap = 16; // number of messages to store private final int bufcap = 80; // line length limit private int numOfMessages = strcap; private final StringBuffer buf = new StringBuffer(bufcap); private int buflen = 0; private final List<String> strings = new ArrayList<String>(strcap); private boolean mtx = false; @Override public void write(int b) throws IOException { char c = (char)b; buf.append(c); if (c == '\n' || ++buflen >= bufcap) this.flush(); } @Override public void flush() { if (mtx) // do not block return; mtx = true; while (strings.size() > numOfMessages) strings.remove(0); String line = buf.toString().replaceAll("(.+)[\\r\\n]", "$1"); if (!line.isEmpty()) strings.add(line); buflen = 0; buf.setLength(buflen); mtx = false; } public List<String> getStrings() { if (mtx) // do not block return new 
ArrayList<String>(); return new ArrayList<String>(strings); } public int getListLen() { return strings.size(); } public void clearMessages() { if (!mtx) strings.clear(); } public int getNumOfMessages() { return numOfMessages; } public void setNumOfMessages(int numOfMessages) { this.numOfMessages = numOfMessages; } public void resetNumOfMessages() { this.numOfMessages = strcap; } } }
src/me/drton/jmavsim/Visualizer3D.java
package me.drton.jmavsim; //import com.sun.j3d.utils.geometry.Box; //import com.sun.j3d.utils.geometry.Cylinder; import com.sun.j3d.utils.geometry.Sphere; import com.sun.j3d.utils.image.ImageException; import com.sun.j3d.utils.image.TextureLoader; import com.sun.j3d.utils.universe.SimpleUniverse; import javax.imageio.ImageIO; import javax.media.j3d.*; import javax.swing.*; import javax.vecmath.*; import java.awt.*; import java.awt.event.*; import java.awt.geom.AffineTransform; import java.awt.geom.Line2D; import java.awt.geom.RoundRectangle2D; import java.awt.image.BufferedImage; //import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; import java.net.URL; import java.util.List; import java.util.ArrayList; import java.util.BitSet; import java.util.Enumeration; /** * 3D Visualizer, works in own thread, synchronized with "world" thread. */ public class Visualizer3D extends JFrame { public static enum ViewTypes { VIEW_STATIC, VIEW_FPV, VIEW_GIMBAL } public static enum ZoomModes { ZOOM_NONE, ZOOM_DYNAMIC, ZOOM_FIXED } public static final double PI_2 = Math.PI / 2d; public static final String TEX_DIR = "environment/"; // folder for all environment textures public static final String SKY_TEXTURE = "earth3.jpg"; public static final String GND_TEXTURE = "ground.jpg"; public static final String COMPASS_IMG = "compass_rose.png"; // for overlay HUD public static final Dimension WINDOW_SIZE = new Dimension(1024, 768); // default application window size public static final float WORLD_SIZE = 50000.0f; // [m] size of world sphere public static final boolean AA_ENABLED = true; // default antialising for 3D scene public static final ViewTypes VIEW_TYPE = ViewTypes.VIEW_STATIC; // default view type public static final ZoomModes ZOOM_MODE = ZoomModes.ZOOM_DYNAMIC; // default zoom type public static final int FPS_TARGET = 60; // target frames per second private Dimension reportPanelSize = new Dimension(Math.min(WINDOW_SIZE.width / 2, 350), 200); 
private boolean reportPaused = false; private int overlaySize = 260; // width & height of compass overlay window private boolean showOverlay = true; private double defaultFOV = Math.PI / 3; // field of view private float defaultDZDistance = 25.0f; // [m] distance to object at which dynamic zoom is activated private float manZoomStep = 0.1f; // manual zoom steps as fraction of current zoom level private Vector3d viewerGroundOffset = new Vector3d(-5.0, 0.0, -1.7); // origin of ground-based fixed view private final World world; private double currentFOV = defaultFOV; private float dynZoomDistance = defaultDZDistance; private ViewTypes viewType; private ZoomModes zoomMode; private Vector3d viewerPosition = new Vector3d(); private Vector3d viewerPositionOffset = new Vector3d(); private Transform3D viewerTransform = new Transform3D(); private SimpleUniverse universe; private View view; private Canvas3D canvas; private BoundingSphere sceneBounds; private TransformGroup viewerTransformGroup; private KinematicObject viewerTargetObject; private KinematicObject viewerPositionObject; private KinematicObject vehicleViewObject; private KinematicObject gimbalViewObject; private MAVLinkHILSystem hilSystem; private JSplitPane splitPane; private ReportPanel reportPanel; private KeyboardHandler keyHandler; private OutputStream outputStream; // for receiving system output messages private MessageOutputStream msgOutputStream; // for logging messages private Matrix3d tmp_m3d1 = new Matrix3d(); // for calculations private Matrix3d tmp_m3d2 = new Matrix3d(); private Vector3d tmp_v3d = new Vector3d(); private BranchGroup tmp_bGrp; private static final long serialVersionUID = 1L; public Visualizer3D(World world) { this.world = world; keyHandler = new KeyboardHandler(); msgOutputStream = new MessageOutputStream(); outputStream = msgOutputStream; // outputStream = new BufferedOutputStream(msgOutputStream); Dimension size = WINDOW_SIZE; Rectangle sizeBounds = 
GraphicsEnvironment.getLocalGraphicsEnvironment().getMaximumWindowBounds(); if (size.width > sizeBounds.width) size.width = sizeBounds.width; if (size.height > sizeBounds.height) size.height = sizeBounds.height; setSize(size); setDefaultCloseOperation(EXIT_ON_CLOSE); setTitle("jMAVSim"); splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT); splitPane.setOneTouchExpandable(false); splitPane.setContinuousLayout(true); splitPane.setFocusable(false); getContentPane().add(splitPane); reportPanel = new ReportPanel(); reportPanel.setFocusable(false); reportPanel.setMinimumSize(new Dimension(50, 0)); reportPanel.setPreferredSize(reportPanelSize); splitPane.setLeftComponent(reportPanel); // 3D graphics canvas GraphicsConfiguration gc = SimpleUniverse.getPreferredConfiguration(); if (showOverlay) canvas = new CustomCanvas3D(gc, size, overlaySize); else canvas = new Canvas3D(gc); canvas.setFocusable(false); canvas.addKeyListener(keyHandler); canvas.setMinimumSize(new Dimension(250, 250)); canvas.setPreferredSize(new Dimension(250, 250)); splitPane.setRightComponent(canvas); universe = new SimpleUniverse(canvas); view = universe.getViewer().getView(); view.setMinimumFrameCycleTime(1000 / FPS_TARGET); view.setBackClipDistance(WORLD_SIZE / 4); view.setSceneAntialiasingEnable(AA_ENABLED); view.setTransparencySortingPolicy(View.TRANSPARENCY_SORT_GEOMETRY); view.setFieldOfView(defaultFOV); viewerTransformGroup = universe.getViewingPlatform().getViewPlatformTransform(); createEnvironment(); setViewType(VIEW_TYPE); setZoomMode(ZOOM_MODE); setVisible(true); splitPane.resetToPreferredSizes(); toggleReportPanel(false); resetView(); canvas.requestFocus(); } public void addWorldModels() { // add any models in World for (WorldObject object : world.getObjects()) { if (object instanceof KinematicObject) { BranchGroup bg = ((KinematicObject) object).getBranchGroup(); if (bg != null) { bg.compile(); universe.addBranchGraph(bg); } } } } private void createEnvironment() { BranchGroup group = 
new BranchGroup(); sceneBounds = new BoundingSphere(new Point3d(0.0, 0.0, 0.0), WORLD_SIZE); float grndLevel = (float)world.getEnvironment().getGroundLevel(); double ground_offset = grndLevel + 0.005; Texture2D tex; Transform3D trans; TransformGroup tg; Matrix3d rot = new Matrix3d(); rot.rotX(PI_2); // Sky Sphere skySphere = new Sphere(1.0f, Sphere.GENERATE_NORMALS_INWARD | Sphere.GENERATE_TEXTURE_COORDS, 36); tex = loadTexture(TEX_DIR + SKY_TEXTURE); skySphere.getAppearance().setTexture(tex); trans = new Transform3D(); trans.setRotation(rot); tg = new TransformGroup(trans); tg.addChild(skySphere); // Background (sky) Background bg = new Background(); bg.setApplicationBounds(sceneBounds); bg.setColor(0, 0, 0.639f); // dark blue BranchGroup backGeoBranch = new BranchGroup(); backGeoBranch.addChild(tg); bg.setGeometry(backGeoBranch); group.addChild(bg); // Ground group.addChild(createFlatFloor(ground_offset)); // group.addChild(createMultiFloor(ground_offset)); // // cylinder-as-floor attempt, but isn't blending right with transparent overlay // trans = new Transform3D(); // trans.setRotation(rot); // trans.transform(new Vector3d(0.0, 0.0, ground_offset)); // tg = new TransformGroup(trans); // tg.addChild(createFlatFloor(ground_offset)); // group.addChild(tg); // Compass rose on ground // CompassRose rose = new CompassRose(world, 25.0f); // rose.setPositionOffset(new Vector3d(viewerGroundOffset.x, viewerGroundOffset.y, ground_offset - 0.05)); // group.addChild(rose.getBranchGroup()); // Light DirectionalLight light1 = new DirectionalLight(new Color3f(1.0f, 1.0f, 1.0f), new Vector3f(4.0f, 7.0f, 12.0f)); light1.setInfluencingBounds(sceneBounds); group.addChild(light1); AmbientLight light2 = new AmbientLight(new Color3f(0.9f, 0.9f, 0.9f)); light2.setInfluencingBounds(sceneBounds); group.addChild(light2); // Update behavior Behavior b = new UpdateBehavior(); b.setSchedulingBounds(sceneBounds); group.addChild(b); group.compile(); universe.addBranchGraph(group); } private 
Shape3D createFlatFloor(double height) { Appearance ap = new Appearance(); double side = WORLD_SIZE * 2.0; double dZ = height; Texture2D tex; float tiles = 1.0f; tex = loadTexture(TEX_DIR + GND_TEXTURE); if (tex != null) tiles = (int)(WORLD_SIZE / tex.getWidth()) * 10; QuadArray plane = new QuadArray(4, GeometryArray.COORDINATES | GeometryArray.TEXTURE_COORDINATE_2 ); plane.setCoordinate(0, new Point3d(-side, side, dZ)); plane.setCoordinate(1, new Point3d(side, side, dZ)); plane.setCoordinate(2, new Point3d(side, -side, dZ)); plane.setCoordinate(3, new Point3d(-side, -side, dZ)); plane.setTextureCoordinate(0, 0, new TexCoord2f(0.0f, 0.0f)); plane.setTextureCoordinate(0, 1, new TexCoord2f(tiles, 0.0f)); plane.setTextureCoordinate(0, 2, new TexCoord2f(tiles, tiles)); plane.setTextureCoordinate(0, 3, new TexCoord2f(0.0f, tiles)); // for cylinder // tex.setBoundaryModeT(Texture.WRAP); // tex.setBoundaryModeS(Texture.WRAP); // Transform3D trans = new Transform3D(); // trans.setScale(worldExtent / tex.getWidth()); // TextureAttributes texat = new TextureAttributes(); // texat.setTextureTransform(trans); // texat.setTextureMode(TextureAttributes.REPLACE); // texat.setPerspectiveCorrectionMode(TextureAttributes.NICEST); // ap.setTextureAttributes(texat); ap.setTexture(tex); //return new Cylinder(worldExtent, 0.001f, Cylinder.GENERATE_TEXTURE_COORDS | Cylinder.GENERATE_NORMALS, 8, 4, ap); return new Shape3D(plane, ap); } /* private float[][] heights; // height map for the floor // the floor is a multi-textured mesh, with splashes of extra textures private OrderedGroup createMultiFloor() { MultiFloor floor = new MultiFloor(TEX_DIR + "grass.gif", 4, TEX_DIR + "stoneBits.gif", 2); // the ground detail textures are grass and bits of stone // the frequencies (4, 2) should divide into the floor length // (FLOOR_LEN (20) in MultiFloor) with no remainder heights = floor.getHeightMap(); // Start building an ordered group of floor meshes. 
Ordering avoids rendering conflicts between the meshes. OrderedGroup floorOG = new OrderedGroup(); floorOG.addChild(floor); // load the textures for the splashes Texture2D flowersTex = loadTexture(TEX_DIR + "flowers.jpg"); Texture2D waterTex = loadTexture(TEX_DIR + "water.jpg"); // add splashes for(int i=0; i < 8; i++) floorOG.addChild( new SplashShape(flowersTex, heights) ); for (int i=0; i < 3; i++) floorOG.addChild( new SplashShape(waterTex, heights) ); // return all the meshes return floorOG; } */ // load image from file as a texture private Texture2D loadTexture(String fn) { System.gc(); // cleanup memory before loading the texture TextureLoader texLoader = null; Texture2D texture = new Texture2D(); texture.setEnable(false); try { texLoader = new TextureLoader(fn, null); // enable Mipmapping (increases memory usage considerably) //texLoader = new TextureLoader(fn, TextureLoader.GENERATE_MIPMAP, null); } catch (ImageException e) { System.out.println("Error, could not load texture: " + fn); System.out.println("Error message:" + e.getLocalizedMessage()); } if (texLoader != null) { texture = (Texture2D) texLoader.getTexture(); if (texture == null) System.out.println("Cannot load texture from " + fn); else { //System.out.println( "\t\tNumber Of MIPMAPS->" + texture.numMipMapLevels() ); texture.setMinFilter(Texture.NICEST); texture.setMagFilter(Texture.NICEST); texture.setAnisotropicFilterMode(texture.ANISOTROPIC_SINGLE_VALUE); texture.setAnisotropicFilterDegree(4.f); //System.out.println("Loaded texture from " + fn); texture.setEnable(true); } } return texture; } /** * Target object to point camera, has effect only if viewerPositionObject is not set. * * @param object */ public void setViewerTargetObject(KinematicObject object) { this.viewerTargetObject = object; } /** * Object to place camera on, if nullptr then camera will be placed in fixed point set by setViewerPosition(). 
 * @param object the object to place the camera on; null reverts to the fixed
 *               camera position set via setViewerPosition()
 */
    public void setViewerPositionObject(KinematicObject object) {
        this.viewerPositionObject = object;
        // dynamic zoom is only meaningful for a fixed camera; leave that mode here
        if (object != null && zoomMode == ZoomModes.ZOOM_DYNAMIC)
            nextZoomMode();
    }

    /**
     * Fixed camera position, has effect only if viewerPositionObject not set.
     * Clears any camera-carrying object and applies the new translation.
     *
     * @param position fixed camera position in world coordinates
     */
    public void setViewerPosition(Vector3d position) {
        this.viewerPositionObject = null;
        this.viewerPosition = position;
        viewerTransform.setTranslation(viewerPosition);
    }

    /**
     * Camera position offset from object position when viewer placed on some object
     *
     * @param offset position offset
     */
    public void setViewerPositionOffset(Vector3d offset) {
        this.viewerPositionOffset = offset;
    }

    /**
     * Set the "vehicle" object to use for switching views.
     *
     * @param object the vehicle object
     */
    public void setVehicleViewObject(KinematicObject object) {
        this.vehicleViewObject = object;
        // if (rose != null)
        //     rose.setBaseObject(object);
    }

    /**
     * Set the "gimbal" object to use for switching views.
     *
     * @param object the gimbal object
     */
    public void setGimbalViewObject(KinematicObject object) {
        this.gimbalViewObject = object;
    }

    /**
     * Set the system being controlled.
     *
     * @param system the HIL system under control
     */
    public void setHilSystem(MAVLinkHILSystem system) {
        this.hilSystem = system;
    }

    /**
     * Sets the text of the simulation report. Ignored while the report panel is
     * hidden or updates are paused (see showReportText()).
     *
     * @param text report text to display
     */
    public void setReportText(String text) {
        if (showReportText())
            reportPanel.setText(text);
    }

    /**
     * Check whether to show the report text (panel visible and updates not paused).
     */
    public boolean showReportText() {
        return reportPanel.isShowing() && !reportPaused;
    }

    /**
     * Show/hide the simulation report.
* * @param text */ public void toggleReportPanel(boolean on) { if (reportPanel == null || (on && reportPanel.isShowing()) || (!on && !reportPanel.isShowing())) return; setReportPaused(!on); if (reportPanel.isShowing()) { reportPanelSize = reportPanel.getSize(); splitPane.setLeftComponent(null); splitPane.setDividerSize(0); } else { reportPanel.setPreferredSize(reportPanelSize); splitPane.setLeftComponent(reportPanel); splitPane.setDividerSize((int)UIManager.get("SplitPane.dividerSize")); } splitPane.resetToPreferredSizes(); revalidate(); } public void toggleReportPanel() { this.toggleReportPanel(!reportPanel.isShowing()); } /** * Toggles updates of the report panel text. * * @param pause */ public void setReportPaused(boolean pause) { reportPaused = pause; reportPanel.setIsFocusable(pause); if (pause) ReportUpdater.setUpdateFreq(0L); else ReportUpdater.resetUpdateFreq(); } public void setShowOverlay(boolean showOverlay) { this.showOverlay = showOverlay; } /** * Toggles scene renderer antialiasing on/off */ public void setAAEnabled(boolean enable) { view.setSceneAntialiasingEnable(enable); if (showOverlay) ((CustomCanvas3D)canvas).setAA(enable); } public OutputStream getOutputStream() { return outputStream; } public void setZoomMode(ZoomModes zoomMode) { if (zoomMode == ZoomModes.ZOOM_DYNAMIC && viewType != ViewTypes.VIEW_STATIC) nextZoomMode(); else this.zoomMode = zoomMode; } public void setDynZoomDistance(float dynZoomDistance) { if (dynZoomDistance < 0.5f) dynZoomDistance = 0.5f; else { double dist = getVectorToTargetObject(viewerPosition, viewerTargetObject).length(); if (dynZoomDistance > dist + defaultDZDistance) dynZoomDistance = (float)dist + defaultDZDistance; } this.dynZoomDistance = dynZoomDistance; } public void setFieldOfView(double fov) { fov = Math.max(Math.min(fov, 2.7), 0.001); view.setFieldOfView(fov); currentFOV = fov; } public void setViewType(ViewTypes v) { switch (v) { case VIEW_STATIC : // Put camera on static point and point to vehicle if 
(this.viewType != ViewTypes.VIEW_STATIC && vehicleViewObject != null) { this.viewType = ViewTypes.VIEW_STATIC; Vector3d pos = new Vector3d(viewerGroundOffset); pos.z = (pos.z + world.getEnvironment().getGroundLevel()); this.setViewerPosition(pos); this.setViewerTargetObject(vehicleViewObject); } break; case VIEW_FPV : // Put camera on vehicle (FPV) if (this.viewType != ViewTypes.VIEW_FPV && vehicleViewObject != null) { this.viewType = ViewTypes.VIEW_FPV; this.setViewerPositionObject(vehicleViewObject); this.setViewerPositionOffset(new Vector3d(-0.0f, 0.0f, -0.3f)); // Offset from vehicle center } break; case VIEW_GIMBAL : if (this.viewType != ViewTypes.VIEW_GIMBAL && gimbalViewObject != null) { this.viewType = ViewTypes.VIEW_GIMBAL; this.setViewerPositionObject(gimbalViewObject); this.setViewerPositionOffset(new Vector3d(0.0f, 0.0f, 0.0f)); } else System.out.println("Unable to set view, gimbal not mounted."); break; } } private void nextZoomMode() { if (zoomMode == ZoomModes.ZOOM_NONE && viewType == ViewTypes.VIEW_STATIC) { zoomMode = ZoomModes.ZOOM_DYNAMIC; } else if (zoomMode == ZoomModes.ZOOM_FIXED) { zoomMode = ZoomModes.ZOOM_NONE; view.setFieldOfView(defaultFOV); } else { zoomMode = ZoomModes.ZOOM_FIXED; view.setFieldOfView(currentFOV); } } public void resetView() { tmp_m3d1.rotZ(Math.PI); tmp_m3d2.rotY(PI_2); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotZ(-PI_2); tmp_m3d1.mul(tmp_m3d2); viewerTransform.setRotation(tmp_m3d1); } public Vector3d getVectorToTargetObject(Vector3d from, KinematicObject objTo) { Vector3d ret = new Vector3d(); ret.sub(objTo.getPosition(), from); return ret; } private void updateVisualizer() { double dist; synchronized (world) { // Synchronize with "world" thread try { // Update branch groups of all kinematic objects for (WorldObject object : world.getObjects()) { if (object instanceof KinematicObject) { tmp_bGrp = ((KinematicObject) object).getBranchGroup(); if (tmp_bGrp != null) { ((KinematicObject) object).updateBranchGroup(); } } } // 
Update view platform if (viewerPositionObject != null) { // Camera on object viewerPosition.set(viewerPositionOffset); viewerPositionObject.getRotation().transform(viewerPosition); viewerPosition.add(viewerPositionObject.getPosition()); viewerTransform.setTranslation(viewerPosition); tmp_m3d1.set(viewerPositionObject.getRotation()); tmp_m3d2.rotZ(PI_2); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotX(-PI_2); tmp_m3d1.mul(tmp_m3d2); viewerTransform.setRotation(tmp_m3d1); } else if (viewerTargetObject != null) { // Fixed-position camera, point camera to target tmp_v3d = viewerTargetObject.getPosition(); dist = getVectorToTargetObject(viewerPosition, viewerTargetObject).length(); tmp_m3d1.rotZ(Math.PI); tmp_m3d2.rotY(PI_2); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotZ(-PI_2); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotY(-Math.atan2(tmp_v3d.y - viewerPosition.y, tmp_v3d.x - viewerPosition.x)); tmp_m3d1.mul(tmp_m3d2); tmp_m3d2.rotX(-Math.asin((tmp_v3d.z - viewerPosition.z) / dist)); tmp_m3d1.mul(tmp_m3d2); viewerTransform.setRotation(tmp_m3d1); if (zoomMode == ZoomModes.ZOOM_DYNAMIC) { if (dist > dynZoomDistance) view.setFieldOfView(dynZoomDistance / dist * currentFOV); else view.setFieldOfView(currentFOV); } } viewerTransformGroup.setTransform(viewerTransform); } catch (BadTransformException e) { e.printStackTrace(); } } } /* * Reset Rotation, Acceleration, Velocity */ private void resetObjectRAV(KinematicObject obj, boolean resetPos) { if (obj == null) return; Vector3f oldpos = new Vector3f(obj.getPosition()); obj.resetObjectParameters(); if (!resetPos) moveObject(obj, oldpos, true); else obj.setIgnoreGravity(false); } /* * Rotate object in steps */ private void rotateObject(KinematicObject obj, Vector3f vec, float deg) { if (obj == null) return; Matrix3d rot = obj.getRotation(); Matrix3d r = new Matrix3d(); if (vec == null) { r.rotZ(0.0); } else { AxisAngle4f aa = new AxisAngle4f(vec, (float)Math.toRadians(deg)); r.set(aa); } rot.mulNormalize(r); } /* * Set a continuous rotation rate of an 
object */ private void spinRateObject(KinematicObject obj, Vector3f vec) { if (obj == null) return; if (vec == null) obj.setRotationRate(new Vector3d()); else { // if still on ground, move it up so it can rotate if (obj.getPosition().z >= 0) moveObject(obj, new Vector3f(0f, 0f, -2.0f), false); obj.getRotationRate().add(new Vector3d(vec)); } } /* * Change position of an object */ private void moveObject(KinematicObject obj, Vector3f vec, boolean absolute) { if (obj == null) return; Vector3d pos = obj.getPosition(); if (absolute) pos.set(vec); else pos.add(new Vector3d(vec)); obj.setIgnoreGravity(pos.z < 0.0); // if (pos.z >= 0.0) // //world.getEnvironment().setG(null); // else // world.getEnvironment().setG(new Vector3d()); } /* * Manipulate wind in environment */ private void windDirection(Vector3f vec, boolean setBase, boolean setCurrent, boolean setDeviation) { if (vec == null) { if (setBase) world.getEnvironment().setWind(new Vector3d()); if (setCurrent) { world.getEnvironment().setCurrentWind(new Vector3d()); System.out.println("Wind reset to zero."); } if (setDeviation) { world.getEnvironment().setWindDeviation(new Vector3d()); System.out.println("Wind deviation reset to zero."); } } else { Vector3d adj = new Vector3d(vec); if (setBase) world.getEnvironment().getWind().add(adj); if (setCurrent) { world.getEnvironment().getCurrentWind(null).add(adj); System.out.println("Wind vector is now " + ReportUtil.vector2str(world.getEnvironment().getCurrentWind(viewerPosition))); } if (setDeviation) { world.getEnvironment().world.getEnvironment().getWindDeviation().add(adj); System.out.println("Wind deviation is now " + ReportUtil.vector2str(world.getEnvironment().getWindDeviation())); } } } // //// private Classes // /* * Custom Canvas class for drawing optional overlay HUD */ private class CustomCanvas3D extends Canvas3D { private static final long serialVersionUID = 7144426579917281131L; private int[] overlayMargins = {10, 10}; // x, y from left/right bottom corner 
private Font font = new Font("SansSerif", Font.BOLD, 14); private Color txtColor = Color.white; private Color hdgColor = Color.magenta; private Color crsColor = Color.green; private Color windColor = Color.blue; // system messages overlay private Font msgFont = new Font("SansSerif", Font.PLAIN, 14); private Color msgColor = new Color(255, 255, 255, 240); private Color msgBgColor = new Color(202, 162, 0, 60); private BufferedImage compassOverlay; private J3DGraphics2D g2d; private Matrix3d m1 = new Matrix3d(); private AffineTransform affTrans = new AffineTransform(); private BufferedImage drawImg; private Graphics2D drawg2d; private Line2D.Float hdgLine; private Line2D.Float crsLine; private Line2D.Float windLine; private BasicStroke crsStroke = new BasicStroke(2.5f); // drawn last, on top private BasicStroke hdgStroke = new BasicStroke(4.0f); private BasicStroke wndStroke = new BasicStroke(5.5f); // drawn first, on bottom private RoundRectangle2D.Float msgBg = new RoundRectangle2D.Float(); private int[] overlaySize = new int[2]; // x, y private int[] messagesSize = {450, 600}; // x, y private int msgLineHeight = 15; private int halfW; private int fps = 1; private int framesCount = 0; private long frameTime = 0L; public CustomCanvas3D(GraphicsConfiguration gc, Dimension windowSize, int overlayWidth) { super(gc); g2d = this.getGraphics2D(); setAA(AA_ENABLED); // constrain overlay sizes if (overlayWidth > windowSize.getWidth() / 2) overlayWidth = (int) (windowSize.getWidth() / 2); if (overlayWidth + 45 > windowSize.getHeight() / 2) overlayWidth = (int) (windowSize.getHeight() / 2); if (messagesSize[0] > windowSize.getWidth() / 2) messagesSize[0] = (int) (windowSize.getWidth() / 2); if (messagesSize[1] > windowSize.getHeight() * 0.75) messagesSize[1] = (int) (windowSize.getHeight() * 0.75); overlaySize[0] = overlayWidth; overlaySize[1] = overlayWidth + 45; halfW = overlayWidth / 2; frameTime = System.nanoTime(); // drawing surface for vector lines drawImg = new 
BufferedImage(overlayWidth, overlayWidth, BufferedImage.TYPE_4BYTE_ABGR); drawg2d = drawImg.createGraphics(); // load and scale compass image for overlay URL file = null; compassOverlay = new BufferedImage(overlayWidth, overlayWidth, BufferedImage.TYPE_4BYTE_ABGR); try { file = new URL("file:./" + TEX_DIR + COMPASS_IMG); if (file != null) { Image img = ImageIO.read(file); img = img.getScaledInstance(overlayWidth, overlayWidth, Image.SCALE_SMOOTH); compassOverlay.createGraphics().drawImage(img, 0, 0, null); } } catch (IOException e) { System.out.println("Error, could not load image: " + TEX_DIR + COMPASS_IMG); System.out.println("Error message:" + e.getLocalizedMessage()); } // set up vector lines for HUD hdgLine = new Line2D.Float(0, 0, 0, halfW * -0.85f); crsLine = new Line2D.Float(0, 0, 0, halfW * -0.425f); windLine = new Line2D.Float(0, 0, 0, halfW * -0.425f); } public void setAA(boolean on) { if (on) { g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); g2d.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY); g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC); } else { g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED); g2d.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_SPEED); g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR); } } // we draw the HUD/overlay here public void postRender() { if (!showOverlay) return; int x = overlayMargins[0]; int y = this.getHeight() - overlaySize[1] - overlayMargins[1]; double z, dZ, norm; Vector3d vect; clearDrawing(); // compass rotation in relation to viewer 
viewerTransform.get(m1); dZ = -Math.atan2(m1.getElement(1, 0), m1.getElement(0, 0)) + Math.toRadians(90.0); affTrans.setToRotation(dZ, halfW, halfW); drawg2d.setTransform(affTrans); drawg2d.drawImage(compassOverlay, 0, 0, this); // wind line in relation to viewer vect = world.getEnvironment().getCurrentWind(viewerPosition); norm = Math.sqrt(vect.x * vect.x + vect.y * vect.y); affTrans.setToTranslation(halfW, halfW); affTrans.rotate(vect.x, vect.y); affTrans.rotate(dZ); // scale length and width based on wind speed affTrans.scale(Math.max(Math.min(Math.abs(vect.z) * 0.5, 10.0), 1.0), Math.min(norm * 0.2, halfW * 0.85)); drawg2d.setTransform(affTrans); drawg2d.setColor(windColor); drawg2d.setStroke(wndStroke); drawg2d.draw(windLine); if (vehicleViewObject != null) { // heading line m1 = (Matrix3d) vehicleViewObject.getRotation().clone(); z = Math.atan2(m1.getElement(1, 0), m1.getElement(0, 0)); affTrans.setToTranslation(halfW, halfW); affTrans.rotate(z + dZ); drawg2d.setTransform(affTrans); drawg2d.setColor(hdgColor); drawg2d.setStroke(hdgStroke); drawg2d.draw(hdgLine); // course over ground line vect = vehicleViewObject.getVelocity(); z = Math.atan2(vect.y, vect.x); norm = Math.sqrt(vect.x * vect.x + vect.y * vect.y); affTrans.setToTranslation(halfW, halfW); affTrans.rotate(z + dZ); // scale length and width based on vehicle speed affTrans.scale(Math.max(Math.min(Math.abs(vect.z) * 0.5, 10.0), 1.0), Math.min(norm * 0.2, halfW * 0.85)); drawg2d.setTransform(affTrans); drawg2d.setColor(crsColor); drawg2d.setStroke(crsStroke); drawg2d.draw(crsLine); } // now draw the composed compass + vectors image on the main J3DGraphics2D g2d.drawImage(drawImg, x, y, this); // draw all HUD text items g2d.setFont(font); g2d.setColor(txtColor); y += drawImg.getHeight() + 25; String zmode = zoomMode == ZoomModes.ZOOM_NONE ? "Fixed" : zoomMode == ZoomModes.ZOOM_DYNAMIC ? 
"Dynamic" : "Manual"; if (zoomMode == ZoomModes.ZOOM_DYNAMIC) zmode += String.format(" @ %.2fm", dynZoomDistance); zmode += String.format(" FOV: %.2f\u00b0", Math.toDegrees(view.getFieldOfView())); g2d.drawString("Zoom mode: " + zmode, x, y); y += 20; g2d.drawString(String.format("FPS: %3d", fps), x, y); x += 70; g2d.setColor(hdgColor); g2d.drawString("HDG", x, y); x += 40; g2d.setColor(crsColor); g2d.drawString("CRS", x, y); x += 40; g2d.setColor(windColor); g2d.drawString("WND", x, y); // messages on the bottom right if (msgOutputStream.getListLen() > 0) { x = this.getWidth() - messagesSize[0] - overlayMargins[0]; int h = Math.min(messagesSize[1], msgOutputStream.getListLen() * msgLineHeight + 5); y = this.getHeight() - h - overlayMargins[1]; msgBg.setRoundRect(x, y, messagesSize[0], h, 15, 15); g2d.setColor(msgBgColor); g2d.draw(msgBg); g2d.fill(msgBg); x += 10; y += msgLineHeight; g2d.setFont(msgFont); g2d.setColor(msgColor); for (String msg : msgOutputStream.getStrings()) { g2d.drawString(msg, x, y); y += msgLineHeight; if (y > this.getHeight()) break; } } g2d.flush(false); ++framesCount; if (System.nanoTime() - frameTime >= (long)1e9) { fps = framesCount; framesCount = 0; frameTime = System.nanoTime(); } } private void clearDrawing() { // clear drawing image affTrans.setToIdentity(); drawg2d.setTransform(affTrans); drawg2d.setComposite(AlphaComposite.getInstance(AlphaComposite.CLEAR, 0.0f)); drawg2d.fillRect(0, 0, overlaySize[0], overlaySize[0]); drawg2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 1.0f)); drawg2d.setColor(Color.BLACK); } } /* * KeyboardHandler */ public class KeyboardHandler extends KeyAdapter { public BitSet keyBits = new BitSet(256); @Override public void keyReleased(KeyEvent e) { keyBits.clear(e.getKeyCode()); checkCumulativeKeys(); switch (e.getKeyCode()) { // View swtich keys case KeyEvent.VK_F : setViewType(ViewTypes.VIEW_FPV); break; case KeyEvent.VK_S : setViewType(ViewTypes.VIEW_STATIC); break; case KeyEvent.VK_G 
: setViewType(ViewTypes.VIEW_GIMBAL); break; // reporting panel case KeyEvent.VK_R : toggleReportPanel(); break; // pause/start report updates case KeyEvent.VK_T : setReportPaused(!reportPaused); break; // toggle zoom mode fixed/dynamic/manual case KeyEvent.VK_Z : nextZoomMode(); break; // zoom reset case KeyEvent.VK_0 : case KeyEvent.VK_ENTER : zoomMode = ZoomModes.ZOOM_NONE; setFieldOfView(defaultFOV); setDynZoomDistance(defaultDZDistance); break; // init sim mode case KeyEvent.VK_I : if (hilSystem != null) hilSystem.initMavLink(); break; // quit sim mode case KeyEvent.VK_Q : if (hilSystem != null) hilSystem.endSim(); break; // toggle HUD overlay case KeyEvent.VK_H : setShowOverlay(!showOverlay); break; // clear messages from HUD case KeyEvent.VK_C : msgOutputStream.clearMessages(); break; // show help text case KeyEvent.VK_F1 : msgOutputStream.clearMessages(); msgOutputStream.setNumOfMessages(50); Simulator.printKeyCommands(); msgOutputStream.resetNumOfMessages(); break; // exit app case KeyEvent.VK_ESCAPE : dispatchEvent(new WindowEvent(getWindows()[0], WindowEvent.WINDOW_CLOSING)); break; // full view and object reset case KeyEvent.VK_SPACE : resetObjectRAV(vehicleViewObject, true); resetView(); break; // vehicle object resets case KeyEvent.VK_NUMPAD5 : // reset wind if (keyBits.get(KeyEvent.VK_ALT)) windDirection(null, true, true, true); // reset vehicle rotation, etc else if (keyBits.get(KeyEvent.VK_CONTROL)) resetObjectRAV(vehicleViewObject, false); // reset only rotation rate else spinRateObject(vehicleViewObject, null); break; } } @Override public void keyPressed(KeyEvent e) { keyBits.set(e.getKeyCode()); checkCumulativeKeys(); switch (e.getKeyCode()) { // zoom in case KeyEvent.VK_PLUS : case KeyEvent.VK_ADD : case KeyEvent.VK_EQUALS : if (zoomMode == ZoomModes.ZOOM_DYNAMIC) setDynZoomDistance(dynZoomDistance * (1.0f - manZoomStep)); else { zoomMode = ZoomModes.ZOOM_FIXED; setFieldOfView(currentFOV * (1.0f - manZoomStep)); } break; // zoom out case 
KeyEvent.VK_MINUS : case KeyEvent.VK_SUBTRACT : if (zoomMode == ZoomModes.ZOOM_DYNAMIC) setDynZoomDistance(dynZoomDistance * (1.0f + manZoomStep)); else { zoomMode = ZoomModes.ZOOM_FIXED; setFieldOfView(currentFOV * (1.0f + manZoomStep)); } break; } } public void checkCumulativeKeys() { // how to move Vector3f dir = new Vector3f(); // how much to move float deg = keyBits.get(KeyEvent.VK_CONTROL) ? 5.0f : 1.0f; // magnitude of move (rotation magnitude is always 1) float m = keyBits.get(KeyEvent.VK_SHIFT) ? deg / 5.0f : 1.0f; if (keyBits.get(KeyEvent.VK_LEFT) || keyBits.get(KeyEvent.VK_KP_LEFT)) dir.x = (-m); if (keyBits.get(KeyEvent.VK_RIGHT) || keyBits.get(KeyEvent.VK_KP_RIGHT)) dir.x = (m); if (keyBits.get(KeyEvent.VK_UP) || keyBits.get(KeyEvent.VK_KP_UP)) dir.y = (-m); if (keyBits.get(KeyEvent.VK_DOWN) || keyBits.get(KeyEvent.VK_KP_DOWN)) dir.y = (m); if (keyBits.get(KeyEvent.VK_END) || keyBits.get(KeyEvent.VK_INSERT)) dir.z = (-m); if (keyBits.get(KeyEvent.VK_PAGE_DOWN) || keyBits.get(KeyEvent.VK_DELETE)) dir.z = (m); if (dir.length() != 0.0) { if (keyHandler.keyBits.get(KeyEvent.VK_ALT)) { // wind deviation dir.set(-dir.y, dir.x, dir.z); windDirection(dir, false, false, true); } else if (keyBits.get(KeyEvent.VK_SHIFT)) { // move vehicle dir.set(-dir.y, dir.x, dir.z); moveObject(vehicleViewObject, dir, false); } else // rotate vehicle rotateObject(vehicleViewObject, dir, deg); } // check for keypad events (rotation rate or wind force) if (keyBits.get(KeyEvent.VK_NUMPAD5)) return; dir = new Vector3f(); m = keyHandler.keyBits.get(KeyEvent.VK_CONTROL) ? 
1.0f : 0.5f; if (keyBits.get(KeyEvent.VK_NUMPAD4)) dir.x = (-m); if (keyBits.get(KeyEvent.VK_NUMPAD6)) dir.x = (m); if (keyBits.get(KeyEvent.VK_NUMPAD8)) dir.y = (-m); if (keyBits.get(KeyEvent.VK_NUMPAD2)) dir.y = (m); if (keyBits.get(KeyEvent.VK_NUMPAD1)) dir.z = (-m); if (keyBits.get(KeyEvent.VK_NUMPAD3) || keyBits.get(KeyEvent.VK_NUMPAD7)) dir.z = (m); if (dir.length() != 0.0) { if (keyHandler.keyBits.get(KeyEvent.VK_ALT)) { // wind strength but not deviation dir.set(-dir.y, dir.x, -dir.z); windDirection(dir, true, true, false); } else // adjust vehicle spin rate spinRateObject(vehicleViewObject, dir); } } } // end KeyboardHandler /* * Thread updater */ class UpdateBehavior extends Behavior { private WakeupCondition condition = new WakeupOnElapsedFrames(0, false); @Override public void initialize() { wakeupOn(condition); } @Override @SuppressWarnings("rawtypes") public void processStimulus(Enumeration wakeup) { Object w; while (wakeup.hasMoreElements()) { w = wakeup.nextElement(); if (w instanceof WakeupOnElapsedFrames) { updateVisualizer(); } wakeupOn(condition); } } } /* * System message logger */ class MessageOutputStream extends OutputStream { private final int strcap = 16; // number of messages to store private final int bufcap = 80; // line length limit private int numOfMessages = strcap; private final StringBuffer buf = new StringBuffer(bufcap); private int buflen = 0; private final List<String> strings = new ArrayList<String>(strcap); private boolean mtx = false; @Override public void write(int b) throws IOException { char c = (char)b; buf.append(c); if (c == '\n' || ++buflen >= bufcap) this.flush(); } @Override public void flush() { if (mtx) // do not block return; mtx = true; while (strings.size() > numOfMessages) strings.remove(0); String line = buf.toString().replaceAll("(.+)[\\r\\n]", "$1"); if (!line.isEmpty()) strings.add(line); buflen = 0; buf.setLength(buflen); mtx = false; } public List<String> getStrings() { if (mtx) // do not block return new 
ArrayList<String>(); return new ArrayList<String>(strings); } public int getListLen() { return strings.size(); } public void clearMessages() { if (!mtx) strings.clear(); } public int getNumOfMessages() { return numOfMessages; } public void setNumOfMessages(int numOfMessages) { this.numOfMessages = numOfMessages; } public void resetNumOfMessages() { this.numOfMessages = strcap; } } }
Visualizer3D: use new textures, improve resolution of ground and reduce world size from 50km to 5km, which is enough for testing
src/me/drton/jmavsim/Visualizer3D.java
Visualizer3D: use new textures, improve resolution of ground
<ide><path>rc/me/drton/jmavsim/Visualizer3D.java <ide> public static final double PI_2 = Math.PI / 2d; <ide> <ide> public static final String TEX_DIR = "environment/"; // folder for all environment textures <del> public static final String SKY_TEXTURE = "earth3.jpg"; <del> public static final String GND_TEXTURE = "ground.jpg"; <add> <add> public static final String SKY_TEXTURE = "HDR_040_Field_Bg.jpg"; <add> //public static final String SKY_TEXTURE = "HDR_111_Parking_Lot_2_Bg.jpg"; <add> // the following has a lower resolution and reduces memory usage <add> //public static final String SKY_TEXTURE = "earth3.jpg"; <add> <add> public static final String GND_TEXTURE = "grass3.jpg"; <add> //public static final String GND_TEXTURE = "ground.jpg"; <ide> public static final String COMPASS_IMG = "compass_rose.png"; // for overlay HUD <ide> public static final Dimension WINDOW_SIZE = new Dimension(1024, 768); // default application window size <del> public static final float WORLD_SIZE = 50000.0f; // [m] size of world sphere <add> public static final float WORLD_SIZE = 5000.0f; // [m] size of world sphere <ide> public static final boolean AA_ENABLED = true; // default antialising for 3D scene <ide> public static final ViewTypes VIEW_TYPE = ViewTypes.VIEW_STATIC; // default view type <ide> public static final ZoomModes ZOOM_MODE = ZoomModes.ZOOM_DYNAMIC; // default zoom type <ide> tex = loadTexture(TEX_DIR + SKY_TEXTURE); <ide> skySphere.getAppearance().setTexture(tex); <ide> trans = new Transform3D(); <del> trans.setRotation(rot); <add> Matrix3d rotSky = new Matrix3d(); <add> rotSky.rotZ(-120d*Math.PI/180d); <add> rotSky.mul(rot); <add> trans.setRotation(rotSky); <ide> tg = new TransformGroup(trans); <ide> tg.addChild(skySphere); <del> <add> <ide> // Background (sky) <ide> Background bg = new Background(); <ide> bg.setApplicationBounds(sceneBounds); <ide> <ide> tex = loadTexture(TEX_DIR + GND_TEXTURE); <ide> if (tex != null) <del> tiles = (int)(WORLD_SIZE / tex.getWidth()) * 
10; <add> tiles = (int)(WORLD_SIZE / tex.getWidth()) * 800; <ide> <ide> QuadArray plane = new QuadArray(4, GeometryArray.COORDINATES | GeometryArray.TEXTURE_COORDINATE_2 ); <ide> plane.setCoordinate(0, new Point3d(-side, side, dZ));
Java
apache-2.0
error: pathspec 'wink-providers/wink-json-provider/src/test/org/apache/wink/providers/json/internal/JSONProviderTest.java' did not match any file(s) known to git
2746525c522c8d98909f00de7c15751f874a2178
1
os890/wink_patches,apache/wink,apache/wink,apache/wink,os890/wink_patches
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *******************************************************************************/ package org.apache.wink.server.internal.providers.entity; import java.io.StringReader; import java.util.Collections; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.XmlValue; import javax.xml.namespace.QName; import org.apache.wink.common.annotations.Asset; import org.apache.wink.common.model.json.JSONUtils; import org.apache.wink.common.model.synd.SyndEntry; import org.apache.wink.common.model.synd.SyndFeed; import org.apache.wink.common.model.synd.SyndText; import org.apache.wink.server.internal.servlet.MockServletInvocationTest; 
import org.apache.wink.test.mock.MockRequestConstructor; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.json.JSONTokener; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; public class JsonProviderTest extends MockServletInvocationTest { @Override protected Class<?>[] getClasses() { return new Class<?>[] {TestResource.class}; } private static final SyndFeed SYND_FEED = new SyndFeed(new SyndText("title"), "id"); private static final SyndEntry SYND_ENTRY = new SyndEntry( new SyndText("entry title"), "entry:id"); private static final String JSON_FEED = "{\"feed\": {\n" + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" + " \"id\": {\n" + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" + " \"$\": \"id\"\n" + " },\n" + " \"title\": {\n" + " \"@type\": \"text\",\n" + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" + " \"$\": \"title\"\n" + " }\n" + "}}"; private static final String JSON = "{\"entry\": {\n" + " \"id\": {\"$\": \"entry:id\"},\n" + " \"title\": {\n" + " \"@type\": \"text\",\n" + " \"$\": \"entry title\"\n" + " }\n" + "}}"; private static final String JSON_ARRAY = "[" + JSON + ", {\"test\":\"ing\"}]"; private static final String JSON_AS_ATOM_ENTRY = "{\"entry\": {\n" + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" + " \"id\": {\n" + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" + " \"$\": \"entry:id\"\n" + " },\n" + " \"title\": {\n" + " \"@type\": \"text\",\n" + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" + " \"$\": \"entry title\"\n" + " }\n" + "}}"; private void compairJsonContent(final String expected, final String actual) throws JSONException { JSONObject result = JSONUtils.objectForString(actual); JSONObject want = JSONUtils.objectForString(expected); assertTrue(JSONUtils.equals(want, result)); } @Path("test") 
public static class TestResource { @GET @Path("json") @Produces("application/json") public JSONObject getJson() throws Exception { return new JSONObject(JSON); } @POST @Path("json") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public JSONObject postJson(JSONObject object) throws Exception { return object.put("foo", "bar"); } @GET @Path("jsonarray") @Produces(MediaType.APPLICATION_JSON) public JSONArray getJsonArray() throws Exception { return new JSONArray(JSON_ARRAY); } @POST @Path("jsonarray") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public JSONArray postJson(JSONArray array) throws Exception { return array.put(Collections.singletonMap("foo", "bar")); } @GET @Path("jsonfeed") @Produces("application/json") public SyndFeed getJsonFeed() throws Exception { return SYND_FEED; } @GET @Path("jaxb") @Produces("application/json") public Entry getJAXB() throws Exception { Entry entry = TestJAXBAsset.getJAXBEntry(); return entry; } @GET @Path("jaxbelement") @Produces("application/json") public JAXBElement<Entry> getJAXBElement() throws Exception { Entry entry = TestJAXBAsset.getJAXBEntry(); return new JAXBElement<Entry>(new QName("entry"), Entry.class, entry); } @GET @Path("atom") @Produces("application/json") public SyndEntry getAtom() throws Exception { return SYND_ENTRY; } @GET @Path("jsonasset") @Produces("application/json") public TestJsonAsset getJsonAsset() throws Exception { return new TestJsonAsset(); } @GET @Path("atomasset") @Produces("application/json") public TestOtherMediaTypeAsset getAtomAsset() throws Exception { return new TestOtherMediaTypeAsset(); } @GET @Path("jaxbasset") @Produces("application/json") public TestJAXBAsset getJAXBAsset() throws Exception { return new TestJAXBAsset(); } } @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "Entry", propOrder = {"id", "title"}) @XmlRootElement(name = "entry") public static class Entry { @XmlElement(name = "id") public String id; 
@XmlElementRef public Title title; } @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "Title", propOrder = {"type"}) @XmlRootElement(name = "title") public static class Title { @XmlAttribute(name = "type") public String type; @XmlValue public String value; } @Asset public static class TestJsonAsset { @Produces(MediaType.APPLICATION_JSON) public JSONObject getJSONObject() { try { return new JSONObject(JSON); } catch (JSONException e) { throw new RuntimeException(e); } } } @Asset public static class TestOtherMediaTypeAsset { public MediaType getJsonXmlMediaType() { return MediaType.APPLICATION_XML_TYPE; } @Produces(MediaType.APPLICATION_JSON) public Entry getJAXB() { return TestJAXBAsset.getJAXBEntry(); } @Consumes(MediaType.APPLICATION_JSON) public void setJAXB(Entry jaxbObject) { fail("json does not support read"); } } @Asset public static class TestJAXBAsset { public static Entry getJAXBEntry() { Title title = new Title(); title.type = "text"; title.value = "entry title"; Entry entry = new Entry(); entry.id = "entry:id"; entry.title = title; return entry; } @Produces(MediaType.APPLICATION_JSON) public Entry getJAXB() { return getJAXBEntry(); } @Consumes(MediaType.APPLICATION_JSON) public void setJAXB(Entry entry) { fail("setJAXB shouldn't be called for Json"); } } public void testGetJson() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("GET", "/test/json", "application/json"); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); compairJsonContent(JSON, response.getContentAsString()); } public void testPostJson() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("POST", "/test/json", "application/json", MediaType.APPLICATION_JSON, JSON.getBytes()); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); JSONObject result = JSONUtils.objectForString(response.getContentAsString()); JSONObject 
want = JSONUtils.objectForString(JSON).put("foo", "bar"); assertTrue(JSONUtils.equals(want, result)); } public void testGetJsonArray() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("GET", "/test/jsonarray", "application/json"); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); JSONArray result = new JSONArray(new JSONTokener(new StringReader(response.getContentAsString()))); JSONArray want = new JSONArray(JSON_ARRAY); assertTrue(JSONUtils.equals(want, result)); } public void testPostJsonArray() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("POST", "/test/jsonarray", "application/json", MediaType.APPLICATION_JSON, JSON_ARRAY.getBytes()); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); JSONArray result = new JSONArray(new JSONTokener(new StringReader(response.getContentAsString()))); JSONArray want = new JSONArray(JSON_ARRAY).put(Collections.singletonMap("foo", "bar")); assertTrue(JSONUtils.equals(want, result)); } public void testGetJsonFeed() throws Exception { MockHttpServletRequest request = MockRequestConstructor .constructMockRequest("GET", "/test/jsonfeed", "application/json"); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); compairJsonContent(JSON_FEED, response.getContentAsString()); } public void testGetJsonFromJAXB() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("GET", "/test/jaxb", "application/json"); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); compairJsonContent(JSON, response.getContentAsString()); } public void testGetJsonFromJAXBElement() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("GET", "/test/jaxbelement", "application/json"); MockHttpServletResponse response = 
invoke(request); assertEquals(200, response.getStatus()); compairJsonContent(JSON, response.getContentAsString()); } public void testGetJsonFromAtom() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("GET", "/test/atom", "application/json"); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); compairJsonContent(JSON_AS_ATOM_ENTRY, response.getContentAsString()); } public void testGetJsonAsset() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("GET", "/test/jsonasset", "application/json"); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); compairJsonContent(JSON, response.getContentAsString()); } public void testGetJAXBAsset() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("GET", "/test/jaxbasset", "application/json"); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); compairJsonContent(JSON, response.getContentAsString()); } public void testGetAtomAsset() throws Exception { MockHttpServletRequest request = MockRequestConstructor.constructMockRequest("GET", "/test/atomasset", "application/json"); MockHttpServletResponse response = invoke(request); assertEquals(200, response.getStatus()); compairJsonContent(JSON, response.getContentAsString()); } }
wink-providers/wink-json-provider/src/test/org/apache/wink/providers/json/internal/JSONProviderTest.java
Re-add JSON test git-svn-id: 00d204a5454029cb4c4f043d44e9809460a5ea01@958012 13f79535-47bb-0310-9956-ffa450edef68
wink-providers/wink-json-provider/src/test/org/apache/wink/providers/json/internal/JSONProviderTest.java
Re-add JSON test
<ide><path>ink-providers/wink-json-provider/src/test/org/apache/wink/providers/json/internal/JSONProviderTest.java <add>/******************************************************************************* <add> * Licensed to the Apache Software Foundation (ASF) under one <add> * or more contributor license agreements. See the NOTICE file <add> * distributed with this work for additional information <add> * regarding copyright ownership. The ASF licenses this file <add> * to you under the Apache License, Version 2.0 (the <add> * "License"); you may not use this file except in compliance <add> * with the License. You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, <add> * software distributed under the License is distributed on an <add> * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY <add> * KIND, either express or implied. See the License for the <add> * specific language governing permissions and limitations <add> * under the License. 
<add> * <add> *******************************************************************************/ <add> <add>package org.apache.wink.server.internal.providers.entity; <add> <add>import java.io.StringReader; <add>import java.util.Collections; <add> <add>import javax.ws.rs.Consumes; <add>import javax.ws.rs.GET; <add>import javax.ws.rs.POST; <add>import javax.ws.rs.Path; <add>import javax.ws.rs.Produces; <add>import javax.ws.rs.core.MediaType; <add>import javax.xml.bind.JAXBElement; <add>import javax.xml.bind.annotation.XmlAccessType; <add>import javax.xml.bind.annotation.XmlAccessorType; <add>import javax.xml.bind.annotation.XmlAttribute; <add>import javax.xml.bind.annotation.XmlElement; <add>import javax.xml.bind.annotation.XmlElementRef; <add>import javax.xml.bind.annotation.XmlRootElement; <add>import javax.xml.bind.annotation.XmlType; <add>import javax.xml.bind.annotation.XmlValue; <add>import javax.xml.namespace.QName; <add> <add>import org.apache.wink.common.annotations.Asset; <add>import org.apache.wink.common.model.json.JSONUtils; <add>import org.apache.wink.common.model.synd.SyndEntry; <add>import org.apache.wink.common.model.synd.SyndFeed; <add>import org.apache.wink.common.model.synd.SyndText; <add>import org.apache.wink.server.internal.servlet.MockServletInvocationTest; <add>import org.apache.wink.test.mock.MockRequestConstructor; <add>import org.json.JSONArray; <add>import org.json.JSONException; <add>import org.json.JSONObject; <add>import org.json.JSONTokener; <add>import org.springframework.mock.web.MockHttpServletRequest; <add>import org.springframework.mock.web.MockHttpServletResponse; <add> <add>public class JsonProviderTest extends MockServletInvocationTest { <add> <add> @Override <add> protected Class<?>[] getClasses() { <add> return new Class<?>[] {TestResource.class}; <add> } <add> <add> private static final SyndFeed SYND_FEED = new SyndFeed(new SyndText("title"), "id"); <add> <add> private static final SyndEntry SYND_ENTRY = <add> new SyndEntry( 
<add> new SyndText("entry title"), <add> "entry:id"); <add> <add> private static final String JSON_FEED = <add> "{\"feed\": {\n" + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" <add> + " \"id\": {\n" <add> + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" <add> + " \"$\": \"id\"\n" <add> + " },\n" <add> + " \"title\": {\n" <add> + " \"@type\": \"text\",\n" <add> + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" <add> + " \"$\": \"title\"\n" <add> + " }\n" <add> + "}}"; <add> <add> private static final String JSON = <add> "{\"entry\": {\n" + " \"id\": {\"$\": \"entry:id\"},\n" <add> + " \"title\": {\n" <add> + " \"@type\": \"text\",\n" <add> + " \"$\": \"entry title\"\n" <add> + " }\n" <add> + "}}"; <add> <add> private static final String JSON_ARRAY = "[" + JSON + ", {\"test\":\"ing\"}]"; <add> <add> private static final String JSON_AS_ATOM_ENTRY = <add> "{\"entry\": {\n" + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" <add> + " \"id\": {\n" <add> + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" <add> + " \"$\": \"entry:id\"\n" <add> + " },\n" <add> + " \"title\": {\n" <add> + " \"@type\": \"text\",\n" <add> + " \"@xmlns\": {\"$\": \"http:\\/\\/www.w3.org\\/2005\\/Atom\"},\n" <add> + " \"$\": \"entry title\"\n" <add> + " }\n" <add> + "}}"; <add> <add> private void compairJsonContent(final String expected, final String actual) <add> throws JSONException { <add> JSONObject result = JSONUtils.objectForString(actual); <add> JSONObject want = JSONUtils.objectForString(expected); <add> assertTrue(JSONUtils.equals(want, result)); <add> } <add> <add> @Path("test") <add> public static class TestResource { <add> <add> @GET <add> @Path("json") <add> @Produces("application/json") <add> public JSONObject getJson() throws Exception { <add> return new JSONObject(JSON); <add> } <add> <add> @POST <add> @Path("json") <add> @Consumes(MediaType.APPLICATION_JSON) <add> 
@Produces(MediaType.APPLICATION_JSON) <add> public JSONObject postJson(JSONObject object) throws Exception { <add> return object.put("foo", "bar"); <add> } <add> <add> @GET <add> @Path("jsonarray") <add> @Produces(MediaType.APPLICATION_JSON) <add> public JSONArray getJsonArray() throws Exception { <add> return new JSONArray(JSON_ARRAY); <add> } <add> <add> @POST <add> @Path("jsonarray") <add> @Consumes(MediaType.APPLICATION_JSON) <add> @Produces(MediaType.APPLICATION_JSON) <add> public JSONArray postJson(JSONArray array) throws Exception { <add> return array.put(Collections.singletonMap("foo", "bar")); <add> } <add> <add> @GET <add> @Path("jsonfeed") <add> @Produces("application/json") <add> public SyndFeed getJsonFeed() throws Exception { <add> return SYND_FEED; <add> } <add> <add> @GET <add> @Path("jaxb") <add> @Produces("application/json") <add> public Entry getJAXB() throws Exception { <add> Entry entry = TestJAXBAsset.getJAXBEntry(); <add> return entry; <add> } <add> <add> @GET <add> @Path("jaxbelement") <add> @Produces("application/json") <add> public JAXBElement<Entry> getJAXBElement() throws Exception { <add> Entry entry = TestJAXBAsset.getJAXBEntry(); <add> return new JAXBElement<Entry>(new QName("entry"), Entry.class, entry); <add> } <add> <add> @GET <add> @Path("atom") <add> @Produces("application/json") <add> public SyndEntry getAtom() throws Exception { <add> return SYND_ENTRY; <add> } <add> <add> @GET <add> @Path("jsonasset") <add> @Produces("application/json") <add> public TestJsonAsset getJsonAsset() throws Exception { <add> return new TestJsonAsset(); <add> } <add> <add> @GET <add> @Path("atomasset") <add> @Produces("application/json") <add> public TestOtherMediaTypeAsset getAtomAsset() throws Exception { <add> return new TestOtherMediaTypeAsset(); <add> } <add> <add> @GET <add> @Path("jaxbasset") <add> @Produces("application/json") <add> public TestJAXBAsset getJAXBAsset() throws Exception { <add> return new TestJAXBAsset(); <add> } <add> <add> } 
<add> <add> @XmlAccessorType(XmlAccessType.FIELD) <add> @XmlType(name = "Entry", propOrder = {"id", "title"}) <add> @XmlRootElement(name = "entry") <add> public static class Entry { <add> <add> @XmlElement(name = "id") <add> public String id; <add> @XmlElementRef <add> public Title title; <add> } <add> <add> @XmlAccessorType(XmlAccessType.FIELD) <add> @XmlType(name = "Title", propOrder = {"type"}) <add> @XmlRootElement(name = "title") <add> public static class Title { <add> <add> @XmlAttribute(name = "type") <add> public String type; <add> @XmlValue <add> public String value; <add> } <add> <add> @Asset <add> public static class TestJsonAsset { <add> <add> @Produces(MediaType.APPLICATION_JSON) <add> public JSONObject getJSONObject() { <add> try { <add> return new JSONObject(JSON); <add> } catch (JSONException e) { <add> throw new RuntimeException(e); <add> } <add> } <add> } <add> <add> @Asset <add> public static class TestOtherMediaTypeAsset { <add> <add> public MediaType getJsonXmlMediaType() { <add> return MediaType.APPLICATION_XML_TYPE; <add> } <add> <add> @Produces(MediaType.APPLICATION_JSON) <add> public Entry getJAXB() { <add> return TestJAXBAsset.getJAXBEntry(); <add> } <add> <add> @Consumes(MediaType.APPLICATION_JSON) <add> public void setJAXB(Entry jaxbObject) { <add> fail("json does not support read"); <add> } <add> } <add> <add> @Asset <add> public static class TestJAXBAsset { <add> <add> public static Entry getJAXBEntry() { <add> Title title = new Title(); <add> title.type = "text"; <add> title.value = "entry title"; <add> Entry entry = new Entry(); <add> entry.id = "entry:id"; <add> entry.title = title; <add> return entry; <add> } <add> <add> @Produces(MediaType.APPLICATION_JSON) <add> public Entry getJAXB() { <add> return getJAXBEntry(); <add> } <add> <add> @Consumes(MediaType.APPLICATION_JSON) <add> public void setJAXB(Entry entry) { <add> fail("setJAXB shouldn't be called for Json"); <add> } <add> } <add> <add> public void testGetJson() throws 
Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("GET", "/test/json", "application/json"); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> compairJsonContent(JSON, response.getContentAsString()); <add> } <add> <add> public void testPostJson() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("POST", <add> "/test/json", <add> "application/json", <add> MediaType.APPLICATION_JSON, <add> JSON.getBytes()); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> JSONObject result = JSONUtils.objectForString(response.getContentAsString()); <add> JSONObject want = JSONUtils.objectForString(JSON).put("foo", "bar"); <add> assertTrue(JSONUtils.equals(want, result)); <add> } <add> <add> public void testGetJsonArray() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("GET", <add> "/test/jsonarray", <add> "application/json"); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> JSONArray result = <add> new JSONArray(new JSONTokener(new StringReader(response.getContentAsString()))); <add> JSONArray want = new JSONArray(JSON_ARRAY); <add> assertTrue(JSONUtils.equals(want, result)); <add> } <add> <add> public void testPostJsonArray() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("POST", <add> "/test/jsonarray", <add> "application/json", <add> MediaType.APPLICATION_JSON, <add> JSON_ARRAY.getBytes()); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> JSONArray result = <add> new JSONArray(new JSONTokener(new StringReader(response.getContentAsString()))); <add> JSONArray want = new 
JSONArray(JSON_ARRAY).put(Collections.singletonMap("foo", "bar")); <add> assertTrue(JSONUtils.equals(want, result)); <add> } <add> <add> public void testGetJsonFeed() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor <add> .constructMockRequest("GET", "/test/jsonfeed", "application/json"); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> compairJsonContent(JSON_FEED, response.getContentAsString()); <add> } <add> <add> public void testGetJsonFromJAXB() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("GET", "/test/jaxb", "application/json"); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> compairJsonContent(JSON, response.getContentAsString()); <add> } <add> <add> public void testGetJsonFromJAXBElement() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("GET", <add> "/test/jaxbelement", <add> "application/json"); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> compairJsonContent(JSON, response.getContentAsString()); <add> } <add> <add> public void testGetJsonFromAtom() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("GET", "/test/atom", "application/json"); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> compairJsonContent(JSON_AS_ATOM_ENTRY, response.getContentAsString()); <add> } <add> <add> public void testGetJsonAsset() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("GET", <add> "/test/jsonasset", <add> "application/json"); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> 
compairJsonContent(JSON, response.getContentAsString()); <add> } <add> <add> public void testGetJAXBAsset() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("GET", <add> "/test/jaxbasset", <add> "application/json"); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> compairJsonContent(JSON, response.getContentAsString()); <add> } <add> <add> public void testGetAtomAsset() throws Exception { <add> MockHttpServletRequest request = <add> MockRequestConstructor.constructMockRequest("GET", <add> "/test/atomasset", <add> "application/json"); <add> MockHttpServletResponse response = invoke(request); <add> assertEquals(200, response.getStatus()); <add> compairJsonContent(JSON, response.getContentAsString()); <add> } <add> <add>}
JavaScript
mit
19e2ad458010142628f9be3eb13bf11ca1253ea5
0
WorldBank-Transport/ram-backend,WorldBank-Transport/Rural-Road-Accessibility,WorldBank-Transport/ram-backend,WorldBank-Transport/Rural-Road-Accessibility,WorldBank-Transport/Rural-Road-Accessibility,WorldBank-Transport/ram-backend
'use strict'; import { assert } from 'chai'; // import fs from 'fs'; import FormData from 'form-data'; import streamToPromise from 'stream-to-promise'; import initServer from '../app/services/server'; // import db from '../app/db'; import { setupStructure as setupDdStructure } from '../app/db/structure'; import { setupStructure as setupStorageStructure } from '../app/s3/structure'; import { fixMeUp } from './utils/data'; var options = { connection: {port: 2000, host: '0.0.0.0'} }; var instance; before(function (done) { initServer(options, function (_, server) { instance = server.hapi; instance.register(require('inject-then'), function (err) { if (err) throw err; done(); }); }); }); describe('Scenario source data', function () { before('Before - Project files', function () { this.timeout(5000); return setupDdStructure() .then(() => setupStorageStructure()) .then(() => fixMeUp()); }); describe('POST /projects/{projId}/scenarios/{scId}/source-data', function () { it('should error when data format is not multipart/form-data', function () { return instance.injectThen({ method: 'POST', url: '/projects/300/scenarios/300/source-data' }).then(res => { assert.equal(res.statusCode, 415, 'Status code is 415'); assert.equal(res.result.error, 'Unsupported Media Type'); }); }); it('should return 404 for a project not found', function () { let form = new FormData(); form.append('', ''); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/300/scenarios/300/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 404, 'Status code is 404'); assert.equal(res.result.message, 'Project not found'); }); }); it('should return 404 for a scenario not found', function () { let form = new FormData(); form.append('', ''); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/300/source-data', payload, headers: form.getHeaders() })) .then(res => { 
assert.equal(res.statusCode, 404, 'Status code is 404'); assert.equal(res.result.message, 'Scenario not found'); }); }); it('should return 400 when project is not pending', function () { let form = new FormData(); form.append('', ''); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/2000/scenarios/2000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.equal(res.result.message, 'Project no longer in the setup phase. Source data can not be uploaded'); }); }); it('should error when source-type is not provided', function () { let form = new FormData(); form.append('', ''); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.match(res.result.message, /"source-type" is required/); }); }); it('should error when source-name is not provided', function () { let form = new FormData(); form.append('source-type', 'file'); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.match(res.result.message, /"source-name" is required/); }); }); it('should error when invalid source-name is provided', function () { let form = new FormData(); form.append('source-type', 'file'); form.append('source-name', 'invalid'); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.equal(res.result.message, '"source-name" must be one of [poi, road-network]'); }); }); it('should 
error when invalid source-type is provided', function () { let form = new FormData(); form.append('source-type', 'invalid'); form.append('source-name', 'poi'); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.equal(res.result.message, '"source-type" must be one of [osm, file]'); }); }); }); describe('POST /projects/{projId}/scenarios/{scId}/source-data -- file', function () { it('should error when file is missing', function () { let form = new FormData(); form.append('source-type', 'file'); form.append('source-name', 'road-network'); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.equal(res.result.message, '"file" is required'); }); }); }); });
test/test-scenario-source-data.js
'use strict'; import { assert } from 'chai'; // import fs from 'fs'; import FormData from 'form-data'; import streamToPromise from 'stream-to-promise'; import Server from '../app/services/server'; // import db from '../app/db'; import { setupStructure as setupDdStructure } from '../app/db/structure'; import { setupStructure as setupStorageStructure } from '../app/s3/structure'; import { fixMeUp } from './utils/data'; var options = { connection: {port: 2000, host: '0.0.0.0'} }; var instance; before(function (done) { instance = Server(options).hapi; instance.register(require('inject-then'), function (err) { if (err) throw err; done(); }); }); // before(function (done) { // initServer(options, function (_, server) { // instance = server.hapi; // instance.register(require('inject-then'), function (err) { // if (err) throw err; // done(); // }); // }); // }); describe('Scenario source data', function () { before('Before - Project files', function () { this.timeout(5000); return setupDdStructure() .then(() => setupStorageStructure()) .then(() => fixMeUp()); }); describe('POST /projects/{projId}/scenarios/{scId}/source-data', function () { it('should error when data format is not multipart/form-data', function () { return instance.injectThen({ method: 'POST', url: '/projects/300/scenarios/300/source-data' }).then(res => { assert.equal(res.statusCode, 415, 'Status code is 415'); assert.equal(res.result.error, 'Unsupported Media Type'); }); }); it('should return 404 for a project not found', function () { let form = new FormData(); form.append('', ''); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/300/scenarios/300/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 404, 'Status code is 404'); assert.equal(res.result.message, 'Project not found'); }); }); it('should return 404 for a scenario not found', function () { let form = new FormData(); form.append('', ''); return 
streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/300/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 404, 'Status code is 404'); assert.equal(res.result.message, 'Scenario not found'); }); }); it('should return 400 when project is not pending', function () { let form = new FormData(); form.append('', ''); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/2000/scenarios/2000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.equal(res.result.message, 'Project no longer in the setup phase. Source data can not be uploaded'); }); }); it('should error when source-type is not provided', function () { let form = new FormData(); form.append('', ''); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.match(res.result.message, /"source-type" is required/); }); }); it('should error when source-name is not provided', function () { let form = new FormData(); form.append('source-type', 'file'); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.match(res.result.message, /"source-name" is required/); }); }); it('should error when invalid source-name is provided', function () { let form = new FormData(); form.append('source-type', 'file'); form.append('source-name', 'invalid'); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) 
.then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.equal(res.result.message, '"source-name" must be one of [poi, road-network]'); }); }); it('should error when invalid source-type is provided', function () { let form = new FormData(); form.append('source-type', 'invalid'); form.append('source-name', 'poi'); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.equal(res.result.message, '"source-type" must be one of [osm, file]'); }); }); }); describe('POST /projects/{projId}/scenarios/{scId}/source-data -- file', function () { it('should error when file is missing', function () { let form = new FormData(); form.append('source-type', 'file'); form.append('source-name', 'road-network'); return streamToPromise(form) .then(payload => instance.injectThen({ method: 'POST', url: '/projects/1000/scenarios/1000/source-data', payload, headers: form.getHeaders() })) .then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); assert.equal(res.result.message, '"file" is required'); }); }); }); });
Fix server init on test
test/test-scenario-source-data.js
Fix server init on test
<ide><path>est/test-scenario-source-data.js <ide> import FormData from 'form-data'; <ide> import streamToPromise from 'stream-to-promise'; <ide> <del>import Server from '../app/services/server'; <add>import initServer from '../app/services/server'; <ide> // import db from '../app/db'; <ide> import { setupStructure as setupDdStructure } from '../app/db/structure'; <ide> import { setupStructure as setupStorageStructure } from '../app/s3/structure'; <ide> <ide> var instance; <ide> before(function (done) { <del> instance = Server(options).hapi; <del> instance.register(require('inject-then'), function (err) { <del> if (err) throw err; <del> done(); <add> initServer(options, function (_, server) { <add> instance = server.hapi; <add> instance.register(require('inject-then'), function (err) { <add> if (err) throw err; <add> <add> done(); <add> }); <ide> }); <ide> }); <del> <del>// before(function (done) { <del>// initServer(options, function (_, server) { <del>// instance = server.hapi; <del>// instance.register(require('inject-then'), function (err) { <del>// if (err) throw err; <del> <del>// done(); <del>// }); <del>// }); <del>// }); <ide> <ide> describe('Scenario source data', function () { <ide> before('Before - Project files', function () {
Java
apache-2.0
17c0c7d9d68614b8b5adb04ecb964abc7e5c73e9
0
vpavic/spring-boot,philwebb/spring-boot,chrylis/spring-boot,scottfrederick/spring-boot,philwebb/spring-boot,jxblum/spring-boot,aahlenst/spring-boot,jxblum/spring-boot,michael-simons/spring-boot,aahlenst/spring-boot,Buzzardo/spring-boot,chrylis/spring-boot,Buzzardo/spring-boot,dreis2211/spring-boot,jxblum/spring-boot,scottfrederick/spring-boot,vpavic/spring-boot,aahlenst/spring-boot,shakuzen/spring-boot,aahlenst/spring-boot,philwebb/spring-boot,philwebb/spring-boot,wilkinsona/spring-boot,mdeinum/spring-boot,philwebb/spring-boot,shakuzen/spring-boot,michael-simons/spring-boot,mdeinum/spring-boot,mbenson/spring-boot,mbenson/spring-boot,vpavic/spring-boot,michael-simons/spring-boot,chrylis/spring-boot,htynkn/spring-boot,wilkinsona/spring-boot,mdeinum/spring-boot,vpavic/spring-boot,dreis2211/spring-boot,mdeinum/spring-boot,mbenson/spring-boot,shakuzen/spring-boot,spring-projects/spring-boot,mbenson/spring-boot,scottfrederick/spring-boot,michael-simons/spring-boot,mbenson/spring-boot,scottfrederick/spring-boot,scottfrederick/spring-boot,jxblum/spring-boot,michael-simons/spring-boot,aahlenst/spring-boot,shakuzen/spring-boot,jxblum/spring-boot,spring-projects/spring-boot,michael-simons/spring-boot,chrylis/spring-boot,wilkinsona/spring-boot,shakuzen/spring-boot,htynkn/spring-boot,vpavic/spring-boot,htynkn/spring-boot,dreis2211/spring-boot,aahlenst/spring-boot,wilkinsona/spring-boot,wilkinsona/spring-boot,chrylis/spring-boot,vpavic/spring-boot,philwebb/spring-boot,Buzzardo/spring-boot,Buzzardo/spring-boot,scottfrederick/spring-boot,dreis2211/spring-boot,dreis2211/spring-boot,chrylis/spring-boot,wilkinsona/spring-boot,htynkn/spring-boot,jxblum/spring-boot,dreis2211/spring-boot,mdeinum/spring-boot,htynkn/spring-boot,mbenson/spring-boot,spring-projects/spring-boot,mdeinum/spring-boot,spring-projects/spring-boot,Buzzardo/spring-boot,shakuzen/spring-boot,Buzzardo/spring-boot,spring-projects/spring-boot,spring-projects/spring-boot,htynkn/spring-boot
/* * Copyright 2012-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.context.config; import org.springframework.boot.origin.Origin; import org.springframework.boot.origin.OriginProvider; import org.springframework.util.StringUtils; /** * A user specified location that can be {@link ConfigDataLocationResolver resolved} to * one or more {@link ConfigDataResource config data resources}. A * {@link ConfigDataLocation} is a simple wrapper around a {@link String} value. The exact * format of the value will depend on the underlying technology, but is usually a URL like * syntax consisting of a prefix and path. For example, {@code crypt:somehost/somepath}. * <p> * Locations can be mandatory or {@link #isOptional() optional}. Optional locations are * prefixed with {@code optional:}. * * @author Phillip Webb * @since 2.4.0 */ public final class ConfigDataLocation implements OriginProvider { /** * Prefix used to indicate that a {@link ConfigDataResource} is optional. */ public static final String OPTIONAL_PREFIX = "optional:"; private final boolean optional; private final String value; private final Origin origin; private ConfigDataLocation(boolean optional, String value, Origin origin) { this.value = value; this.optional = optional; this.origin = origin; } /** * Return the the location is optional and should ignore * {@link ConfigDataNotFoundException}. 
* @return if the location is optional */ public boolean isOptional() { return this.optional; } /** * Return the value of the location (always excluding any user specified * {@code optional:} prefix. * @return the location value */ public String getValue() { return this.value; } /** * Return if {@link #getValue()} has the specified prefix. * @param prefix the prefix to check * @return if the value has the prefix */ public boolean hasPrefix(String prefix) { return this.value.startsWith(prefix); } /** * Return {@link #getValue()} with the specified prefix removed. If the location does * not have the given prefix then the {@link #getValue()} is returned unchanged. * @param prefix the prefix to check * @return the value with the prefix removed */ public String getNonPrefixedValue(String prefix) { if (hasPrefix(prefix)) { return this.value.substring(prefix.length()); } return this.value; } @Override public Origin getOrigin() { return this.origin; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } ConfigDataLocation other = (ConfigDataLocation) obj; return this.value.equals(other.value); } @Override public int hashCode() { return this.value.hashCode(); } @Override public String toString() { return (!this.optional) ? this.value : OPTIONAL_PREFIX + this.value; } /** * Create a new {@link ConfigDataLocation} with a specific {@link Origin}. * @param origin the origin to set * @return a new {@link ConfigDataLocation} instance. */ ConfigDataLocation withOrigin(Origin origin) { return new ConfigDataLocation(this.optional, this.value, origin); } /** * Factory method to create a new {@link ConfigDataLocation} from a string. 
* @param location the location string * @return a {@link ConfigDataLocation} instance or {@code null} if no location was * provided */ public static ConfigDataLocation of(String location) { boolean optional = location != null && location.startsWith(OPTIONAL_PREFIX); String value = (!optional) ? location : location.substring(OPTIONAL_PREFIX.length()); if (!StringUtils.hasText(value)) { return null; } return new ConfigDataLocation(optional, value, null); } }
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/config/ConfigDataLocation.java
/* * Copyright 2012-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.context.config; import org.springframework.boot.origin.Origin; import org.springframework.boot.origin.OriginProvider; import org.springframework.util.StringUtils; /** * A user specified location that can be {@link ConfigDataLocationResolver resolved} to * one or {@link ConfigDataResource config data resources}. A {@link ConfigDataLocation} * is a simple wrapper around a {@link String} value. The exact format of the value will * depend on the underlying technology, but is usually a URL like syntax consisting of a * prefix and path. For example, {@code crypt:somehost/somepath}. * <p> * Locations can be mandatory or {@link #isOptional() optional}. Optional locations are * prefixed with {@code optional:}. * * @author Phillip Webb * @since 2.4.0 */ public final class ConfigDataLocation implements OriginProvider { /** * Prefix used to indicate that a {@link ConfigDataResource} is optional. */ public static final String OPTIONAL_PREFIX = "optional:"; private final boolean optional; private final String value; private final Origin origin; private ConfigDataLocation(boolean optional, String value, Origin origin) { this.value = value; this.optional = optional; this.origin = origin; } /** * Return the the location is optional and should ignore * {@link ConfigDataNotFoundException}. 
* @return if the location is optional */ public boolean isOptional() { return this.optional; } /** * Return the value of the location (always excluding any user specified * {@code optional:} prefix. * @return the location value */ public String getValue() { return this.value; } /** * Return if {@link #getValue()} has the specified prefix. * @param prefix the prefix to check * @return if the value has the prefix */ public boolean hasPrefix(String prefix) { return this.value.startsWith(prefix); } /** * Return {@link #getValue()} with the specified prefix removed. If the location does * not have the given prefix then the {@link #getValue()} is returned unchanged. * @param prefix the prefix to check * @return the value with the prefix removed */ public String getNonPrefixedValue(String prefix) { if (hasPrefix(prefix)) { return this.value.substring(prefix.length()); } return this.value; } @Override public Origin getOrigin() { return this.origin; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } ConfigDataLocation other = (ConfigDataLocation) obj; return this.value.equals(other.value); } @Override public int hashCode() { return this.value.hashCode(); } @Override public String toString() { return (!this.optional) ? this.value : OPTIONAL_PREFIX + this.value; } /** * Create a new {@link ConfigDataLocation} with a specific {@link Origin}. * @param origin the origin to set * @return a new {@link ConfigDataLocation} instance. */ ConfigDataLocation withOrigin(Origin origin) { return new ConfigDataLocation(this.optional, this.value, origin); } /** * Factory method to create a new {@link ConfigDataLocation} from a string. 
* @param location the location string * @return a {@link ConfigDataLocation} instance or {@code null} if no location was * provided */ public static ConfigDataLocation of(String location) { boolean optional = location != null && location.startsWith(OPTIONAL_PREFIX); String value = (!optional) ? location : location.substring(OPTIONAL_PREFIX.length()); if (!StringUtils.hasText(value)) { return null; } return new ConfigDataLocation(optional, value, null); } }
Fix javadoc typo in ConfigDataLocation See gh-24660
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/config/ConfigDataLocation.java
Fix javadoc typo in ConfigDataLocation
<ide><path>pring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/config/ConfigDataLocation.java <ide> /* <del> * Copyright 2012-2020 the original author or authors. <add> * Copyright 2012-2021 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> <ide> /** <ide> * A user specified location that can be {@link ConfigDataLocationResolver resolved} to <del> * one or {@link ConfigDataResource config data resources}. A {@link ConfigDataLocation} <del> * is a simple wrapper around a {@link String} value. The exact format of the value will <del> * depend on the underlying technology, but is usually a URL like syntax consisting of a <del> * prefix and path. For example, {@code crypt:somehost/somepath}. <add> * one or more {@link ConfigDataResource config data resources}. A <add> * {@link ConfigDataLocation} is a simple wrapper around a {@link String} value. The exact <add> * format of the value will depend on the underlying technology, but is usually a URL like <add> * syntax consisting of a prefix and path. For example, {@code crypt:somehost/somepath}. <ide> * <p> <ide> * Locations can be mandatory or {@link #isOptional() optional}. Optional locations are <ide> * prefixed with {@code optional:}.
Java
apache-2.0
7a89447ffa78c471c4e5d9a17464ca9631080ebe
0
DevStreet/FinanceAnalytics,jerome79/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,McLeodMoores/starling,ChinaQuants/OG-Platform,jeorme/OG-Platform,jeorme/OG-Platform,jerome79/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,nssales/OG-Platform,nssales/OG-Platform,nssales/OG-Platform,McLeodMoores/starling,McLeodMoores/starling,ChinaQuants/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,ChinaQuants/OG-Platform,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform
/** * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics; import java.util.Collections; import java.util.Map; import java.util.Set; import javax.time.Instant; import javax.time.InstantProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Sets; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.ComputationTargetType; import com.opengamma.engine.function.AbstractFunction; import com.opengamma.engine.function.CompiledFunctionDefinition; import com.opengamma.engine.function.FunctionCompilationContext; import com.opengamma.engine.function.FunctionDefinition; import com.opengamma.engine.function.FunctionExecutionContext; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.function.FunctionInvoker; import com.opengamma.engine.function.FunctionParameters; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.util.ArgumentChecker; /** * Wraps another function definition into a form that can work with one or more of its * inputs missing. */ public class MissingInputsFunction extends AbstractFunction implements CompiledFunctionDefinition, FunctionInvoker { private static final Logger s_logger = LoggerFactory.getLogger(MissingInputsFunction.class); /** * Value of the {@link ValuePropertyNames#AGGREGATION} property when one or more of * the inputs may be missing. */ public static final String AGGREGATION_STYLE_MISSING = "MissingInputs"; /** * Value of the {@link ValuePropertyNames#AGGREGATION} property when all of the inputs * must be available. 
*/ public static final String AGGREGATION_STYLE_FULL = "Full"; private final FunctionDefinition _underlyingDefinition; private final CompiledFunctionDefinition _underlyingCompiled; private final FunctionInvoker _underlyingInvoker; public MissingInputsFunction(final FunctionDefinition underlying) { ArgumentChecker.notNull(underlying, "underlying"); _underlyingDefinition = underlying; if (underlying instanceof CompiledFunctionDefinition) { _underlyingCompiled = (CompiledFunctionDefinition) underlying; if (underlying instanceof FunctionInvoker) { _underlyingInvoker = (FunctionInvoker) underlying; } else { _underlyingInvoker = null; } } else { _underlyingCompiled = null; _underlyingInvoker = null; } } protected MissingInputsFunction(final CompiledFunctionDefinition underlying) { ArgumentChecker.notNull(underlying, "underlying"); _underlyingDefinition = underlying.getFunctionDefinition(); _underlyingCompiled = underlying; if (underlying instanceof FunctionInvoker) { _underlyingInvoker = (FunctionInvoker) underlying; } else { _underlyingInvoker = null; } } protected MissingInputsFunction(final FunctionInvoker underlying) { ArgumentChecker.notNull(underlying, "underlying"); _underlyingDefinition = null; _underlyingCompiled = null; _underlyingInvoker = underlying; } protected MissingInputsFunction create(final CompiledFunctionDefinition underlying) { return new MissingInputsFunction(underlying); } protected MissingInputsFunction create(final FunctionInvoker underlying) { return new MissingInputsFunction(underlying); } protected FunctionDefinition getUnderlyingDefinition() { return _underlyingDefinition; } protected CompiledFunctionDefinition getUnderlyingCompiled() { return _underlyingCompiled; } protected FunctionInvoker getUnderlyingInvoker() { return _underlyingInvoker; } protected String getAggregationStyleMissing() { return AGGREGATION_STYLE_MISSING; } protected String getAggregationStyleFull() { return AGGREGATION_STYLE_FULL; } // AbstractFunction @Override public 
void setUniqueId(final String identifier) { if (getUnderlyingDefinition() instanceof AbstractFunction) { ((AbstractFunction) getUnderlyingDefinition()).setUniqueId(identifier); } super.setUniqueId(identifier); } // FunctionDefinition @Override public void init(final FunctionCompilationContext context) { getUnderlyingDefinition().init(context); } @Override public CompiledFunctionDefinition compile(final FunctionCompilationContext context, final InstantProvider atInstant) { final CompiledFunctionDefinition underlying = getUnderlyingDefinition().compile(context, atInstant); if (underlying == getUnderlyingCompiled()) { s_logger.debug("Compiling underlying on {} gives self", this); return this; } else { s_logger.debug("Creating delegate for compiled underlying on {}", this); return create(underlying); } } @Override public String getShortName() { return getUnderlyingDefinition().getShortName(); } @Override public FunctionParameters getDefaultParameters() { return getUnderlyingDefinition().getDefaultParameters(); } // CompiledFunctionDefinition @Override public FunctionDefinition getFunctionDefinition() { return this; } @Override public ComputationTargetType getTargetType() { return getUnderlyingCompiled().getTargetType(); } @Override public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) { return getUnderlyingCompiled().canApplyTo(context, target); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) { final Set<ValueSpecification> underlyingResults = getUnderlyingCompiled().getResults(context, target); if (underlyingResults == null) { s_logger.debug("Underlying returned null for target {}", target); return null; } final Set<ValueSpecification> results = Sets.newHashSetWithExpectedSize(underlyingResults.size()); for (ValueSpecification underlyingResult : underlyingResults) { final ValueProperties.Builder properties = 
underlyingResult.getProperties().copy(); properties.with(ValuePropertyNames.AGGREGATION, getAggregationStyleFull(), getAggregationStyleMissing()); results.add(new ValueSpecification(underlyingResult.getValueName(), underlyingResult.getTargetSpecification(), properties.get())); } s_logger.debug("Returning results {}", results); return results; } @Override public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) { // User must have requested our aggregation style final ValueProperties resultConstraints = desiredValue.getConstraints(); final Set<String> aggregationStyle = resultConstraints.getValues(ValuePropertyNames.AGGREGATION); if (aggregationStyle == null) { s_logger.debug("No aggregation requirements on {}", desiredValue); return null; } // Requirement has all constraints asked of us (minus the aggregation style) final ValueProperties requirementConstraints = resultConstraints.withoutAny(ValuePropertyNames.AGGREGATION); final Set<ValueRequirement> requirements = getUnderlyingCompiled().getRequirements(context, target, new ValueRequirement(desiredValue.getValueName(), desiredValue.getTargetSpecification(), requirementConstraints)); s_logger.debug("Returning requirements {} for {}", requirements, desiredValue); return requirements; } @Override public boolean canHandleMissingRequirements() { return getUnderlyingCompiled().canHandleMissingRequirements(); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target, final Map<ValueSpecification, ValueRequirement> inputs) { final Set<ValueSpecification> underlyingResults = getUnderlyingCompiled().getResults(context, target, inputs); if (underlyingResults == null) { s_logger.debug("Underlying returned null inputs {}", inputs); return null; } final Set<ValueSpecification> results = Sets.newHashSetWithExpectedSize(underlyingResults.size() * 2); for 
(ValueSpecification underlyingResult : underlyingResults) { final ValueProperties properties = underlyingResult.getProperties(); if ((properties.getProperties() != null) && properties.getProperties().isEmpty()) { results.add(underlyingResult); } else { final ValueProperties.Builder builder = properties.copy(); builder.with(ValuePropertyNames.AGGREGATION, getAggregationStyleFull()); results.add(new ValueSpecification(underlyingResult.getValueName(), underlyingResult.getTargetSpecification(), builder.get())); builder.withoutAny(ValuePropertyNames.AGGREGATION).with(ValuePropertyNames.AGGREGATION, getAggregationStyleMissing()); results.add(new ValueSpecification(underlyingResult.getValueName(), underlyingResult.getTargetSpecification(), builder.get())); } } s_logger.debug("Returning results {} for {}", results, inputs); return results; } @Override public Set<ValueRequirement> getAdditionalRequirements(final FunctionCompilationContext context, final ComputationTarget target, final Set<ValueSpecification> inputs, Set<ValueSpecification> outputs) { final Set<ValueSpecification> underlyingOutputs = Sets.newHashSetWithExpectedSize(outputs.size()); for (ValueSpecification output : outputs) { final ValueProperties properties = output.getProperties().withoutAny(ValuePropertyNames.AGGREGATION); underlyingOutputs.add(new ValueSpecification(output.getValueName(), output.getTargetSpecification(), properties)); } return getUnderlyingCompiled().getAdditionalRequirements(context, target, inputs, underlyingOutputs); } @Override public Instant getEarliestInvocationTime() { return getUnderlyingCompiled().getEarliestInvocationTime(); } @Override public Instant getLatestInvocationTime() { return getUnderlyingCompiled().getLatestInvocationTime(); } @Override public FunctionInvoker getFunctionInvoker() { final FunctionInvoker underlying = getUnderlyingCompiled().getFunctionInvoker(); if (underlying == getUnderlyingInvoker()) { return this; } else { return create(underlying); } } // 
FunctionInvoker @Override public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) { final Set<ValueRequirement> underlyingDesired = Sets.newHashSetWithExpectedSize(desiredValues.size()); for (ValueRequirement desiredValue : desiredValues) { final ValueProperties requirementConstraints = desiredValue.getConstraints().withoutAny(ValuePropertyNames.AGGREGATION); underlyingDesired.add(new ValueRequirement(desiredValue.getValueName(), desiredValue.getTargetSpecification(), requirementConstraints)); } final Set<ComputedValue> underlyingResults = getUnderlyingInvoker().execute(executionContext, inputs, target, underlyingDesired); if (underlyingResults == null) { return Collections.emptySet(); } final Set<ComputedValue> results = Sets.newHashSetWithExpectedSize(underlyingResults.size()); for (ComputedValue underlyingResult : underlyingResults) { final ValueSpecification resultSpec = underlyingResult.getSpecification(); final ValueProperties.Builder properties = resultSpec.getProperties().copy(); properties.with(ValuePropertyNames.AGGREGATION, getAggregationStyleMissing()); results.add(new ComputedValue(new ValueSpecification(resultSpec.getValueName(), resultSpec.getTargetSpecification(), properties.get()), underlyingResult.getValue())); if (inputs.getMissingValues().isEmpty()) { properties.withoutAny(ValuePropertyNames.AGGREGATION).with(ValuePropertyNames.AGGREGATION, getAggregationStyleFull()); results.add(new ComputedValue(new ValueSpecification(resultSpec.getValueName(), resultSpec.getTargetSpecification(), properties.get()), underlyingResult.getValue())); } } return results; } @Override public boolean canHandleMissingInputs() { return true; } }
projects/OG-Financial/src/com/opengamma/financial/analytics/MissingInputsFunction.java
/** * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics; import java.util.Collections; import java.util.Map; import java.util.Set; import javax.time.Instant; import javax.time.InstantProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Sets; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.ComputationTargetType; import com.opengamma.engine.function.AbstractFunction; import com.opengamma.engine.function.CompiledFunctionDefinition; import com.opengamma.engine.function.FunctionCompilationContext; import com.opengamma.engine.function.FunctionDefinition; import com.opengamma.engine.function.FunctionExecutionContext; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.function.FunctionInvoker; import com.opengamma.engine.function.FunctionParameters; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.util.ArgumentChecker; /** * Wraps another function definition into a form that can work with one or more of its * inputs missing. */ public class MissingInputsFunction extends AbstractFunction implements CompiledFunctionDefinition, FunctionInvoker { private static final Logger s_logger = LoggerFactory.getLogger(MissingInputsFunction.class); /** * Value of the {@link ValuePropertyNames#AGGREGATION} property when one or more of * the inputs may be missing. */ public static final String AGGREGATION_STYLE_MISSING = "MissingInputs"; /** * Value of the {@link ValuePropertyNames#AGGREGATION} property when all of the inputs * must be available. 
*/ public static final String AGGREGATION_STYLE_FULL = "Full"; private final FunctionDefinition _underlyingDefinition; private final CompiledFunctionDefinition _underlyingCompiled; private final FunctionInvoker _underlyingInvoker; public MissingInputsFunction(final FunctionDefinition underlying) { ArgumentChecker.notNull(underlying, "underlying"); _underlyingDefinition = underlying; if (underlying instanceof CompiledFunctionDefinition) { _underlyingCompiled = (CompiledFunctionDefinition) underlying; if (underlying instanceof FunctionInvoker) { _underlyingInvoker = (FunctionInvoker) underlying; } else { _underlyingInvoker = null; } } else { _underlyingCompiled = null; _underlyingInvoker = null; } } protected MissingInputsFunction(final CompiledFunctionDefinition underlying) { ArgumentChecker.notNull(underlying, "underlying"); _underlyingDefinition = underlying.getFunctionDefinition(); _underlyingCompiled = underlying; if (underlying instanceof FunctionInvoker) { _underlyingInvoker = (FunctionInvoker) underlying; } else { _underlyingInvoker = null; } } protected MissingInputsFunction(final FunctionInvoker underlying) { ArgumentChecker.notNull(underlying, "underlying"); _underlyingDefinition = null; _underlyingCompiled = null; _underlyingInvoker = underlying; } protected MissingInputsFunction create(final CompiledFunctionDefinition underlying) { return new MissingInputsFunction(underlying); } protected MissingInputsFunction create(final FunctionInvoker underlying) { return new MissingInputsFunction(underlying); } protected FunctionDefinition getUnderlyingDefinition() { return _underlyingDefinition; } protected CompiledFunctionDefinition getUnderlyingCompiled() { return _underlyingCompiled; } protected FunctionInvoker getUnderlyingInvoker() { return _underlyingInvoker; } protected String getAggregationStyleMissing() { return AGGREGATION_STYLE_MISSING; } protected String getAggregationStyleFull() { return AGGREGATION_STYLE_FULL; } // AbstractFunction @Override public 
void setUniqueId(final String identifier) { if (getUnderlyingDefinition() instanceof AbstractFunction) { ((AbstractFunction) getUnderlyingDefinition()).setUniqueId(identifier); } super.setUniqueId(identifier); } // FunctionDefinition @Override public void init(final FunctionCompilationContext context) { getUnderlyingDefinition().init(context); } @Override public CompiledFunctionDefinition compile(final FunctionCompilationContext context, final InstantProvider atInstant) { final CompiledFunctionDefinition underlying = getUnderlyingDefinition().compile(context, atInstant); if (underlying == getUnderlyingCompiled()) { s_logger.debug("Compiling underlying on {} gives self", this); return this; } else { s_logger.debug("Creating delegate for compiled underlying on {}", this); return create(underlying); } } @Override public String getShortName() { return getUnderlyingDefinition().getShortName(); } @Override public FunctionParameters getDefaultParameters() { return getUnderlyingDefinition().getDefaultParameters(); } // CompiledFunctionDefinition @Override public FunctionDefinition getFunctionDefinition() { return this; } @Override public ComputationTargetType getTargetType() { return getUnderlyingCompiled().getTargetType(); } @Override public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) { return getUnderlyingCompiled().canApplyTo(context, target); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) { final Set<ValueSpecification> underlyingResults = getUnderlyingCompiled().getResults(context, target); if (underlyingResults == null) { s_logger.debug("Underlying returned null for target {}", target); return null; } final Set<ValueSpecification> results = Sets.newHashSetWithExpectedSize(underlyingResults.size()); for (ValueSpecification underlyingResult : underlyingResults) { final ValueProperties.Builder properties = 
underlyingResult.getProperties().copy(); properties.with(ValuePropertyNames.AGGREGATION, getAggregationStyleFull(), getAggregationStyleMissing()); results.add(new ValueSpecification(underlyingResult.getValueName(), underlyingResult.getTargetSpecification(), properties.get())); } s_logger.debug("Returning results {}", results); return results; } @Override public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) { // User must have requested our aggregation style final ValueProperties resultConstraints = desiredValue.getConstraints(); final Set<String> aggregationStyle = resultConstraints.getValues(ValuePropertyNames.AGGREGATION); if (aggregationStyle == null) { s_logger.debug("No aggregation requirements on {}", desiredValue); return null; } // Requirement has all constraints asked of us (minus the aggregation style) final ValueProperties requirementConstraints = resultConstraints.withoutAny(ValuePropertyNames.AGGREGATION); final Set<ValueRequirement> requirements = getUnderlyingCompiled().getRequirements(context, target, new ValueRequirement(desiredValue.getValueName(), desiredValue.getTargetSpecification(), requirementConstraints)); s_logger.debug("Returning requirements {} for {}", requirements, desiredValue); return requirements; } @Override public boolean canHandleMissingRequirements() { return getUnderlyingCompiled().canHandleMissingRequirements(); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target, final Map<ValueSpecification, ValueRequirement> inputs) { final Set<ValueSpecification> underlyingResults = getUnderlyingCompiled().getResults(context, target, inputs); if (underlyingResults == null) { s_logger.debug("Underlying returned null inputs {}", inputs); return null; } final Set<ValueSpecification> results = Sets.newHashSetWithExpectedSize(underlyingResults.size() * 2); for 
(ValueSpecification underlyingResult : underlyingResults) { final ValueProperties properties = underlyingResult.getProperties(); if ((properties.getProperties() != null) && properties.getProperties().isEmpty()) { results.add(underlyingResult); } else { final ValueProperties.Builder builder = properties.copy(); builder.with(ValuePropertyNames.AGGREGATION, getAggregationStyleFull()); results.add(new ValueSpecification(underlyingResult.getValueName(), underlyingResult.getTargetSpecification(), builder.get())); builder.withoutAny(ValuePropertyNames.AGGREGATION).with(ValuePropertyNames.AGGREGATION, getAggregationStyleMissing()); results.add(new ValueSpecification(underlyingResult.getValueName(), underlyingResult.getTargetSpecification(), builder.get())); } } s_logger.debug("Returning results {} for {}", results, inputs); return results; } @Override public Set<ValueRequirement> getAdditionalRequirements(final FunctionCompilationContext context, final ComputationTarget target, final Set<ValueSpecification> inputs, Set<ValueSpecification> outputs) { final Set<ValueSpecification> underlyingOutputs = Sets.newHashSetWithExpectedSize(outputs.size()); for (ValueSpecification output : outputs) { final ValueProperties properties = output.getProperties().withoutAny(ValuePropertyNames.AGGREGATION); underlyingOutputs.add(new ValueSpecification(output.getValueName(), output.getTargetSpecification(), properties)); } return getUnderlyingCompiled().getAdditionalRequirements(context, target, inputs, underlyingOutputs); } @Override public Instant getEarliestInvocationTime() { return getUnderlyingCompiled().getEarliestInvocationTime(); } @Override public Instant getLatestInvocationTime() { return getUnderlyingCompiled().getLatestInvocationTime(); } @Override public FunctionInvoker getFunctionInvoker() { final FunctionInvoker underlying = getUnderlyingCompiled().getFunctionInvoker(); if (underlying == getUnderlyingInvoker()) { return this; } else { return create(underlying); } } // 
FunctionInvoker @Override public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) { final Set<ValueRequirement> underlyingDesired = Sets.newHashSetWithExpectedSize(desiredValues.size()); for (ValueRequirement desiredValue : desiredValues) { final ValueProperties requirementConstraints = desiredValue.getConstraints().withoutAny(ValuePropertyNames.AGGREGATION); underlyingDesired.add(new ValueRequirement(desiredValue.getValueName(), desiredValue.getTargetSpecification(), requirementConstraints)); } final Set<ComputedValue> underlyingResults = getUnderlyingInvoker().execute(executionContext, inputs, target, underlyingDesired); if (underlyingResults == null) { return Collections.emptySet(); } final Set<ComputedValue> results = Sets.newHashSetWithExpectedSize(underlyingResults.size()); for (ComputedValue underlyingResult : results) { final ValueSpecification resultSpec = underlyingResult.getSpecification(); final ValueProperties.Builder properties = resultSpec.getProperties().copy(); properties.with(ValuePropertyNames.AGGREGATION, getAggregationStyleMissing()); results.add(new ComputedValue(new ValueSpecification(resultSpec.getValueName(), resultSpec.getTargetSpecification(), properties.get()), underlyingResult.getValue())); if (inputs.getMissingValues().isEmpty()) { properties.withoutAny(ValuePropertyNames.AGGREGATION).with(ValuePropertyNames.AGGREGATION, getAggregationStyleFull()); results.add(new ComputedValue(new ValueSpecification(resultSpec.getValueName(), resultSpec.getTargetSpecification(), properties.get()), underlyingResult.getValue())); } } return results; } @Override public boolean canHandleMissingInputs() { return true; } }
Correct MissingInputsFunction wrapper.
projects/OG-Financial/src/com/opengamma/financial/analytics/MissingInputsFunction.java
Correct MissingInputsFunction wrapper.
<ide><path>rojects/OG-Financial/src/com/opengamma/financial/analytics/MissingInputsFunction.java <ide> return Collections.emptySet(); <ide> } <ide> final Set<ComputedValue> results = Sets.newHashSetWithExpectedSize(underlyingResults.size()); <del> for (ComputedValue underlyingResult : results) { <add> for (ComputedValue underlyingResult : underlyingResults) { <ide> final ValueSpecification resultSpec = underlyingResult.getSpecification(); <ide> final ValueProperties.Builder properties = resultSpec.getProperties().copy(); <ide> properties.with(ValuePropertyNames.AGGREGATION, getAggregationStyleMissing());
JavaScript
lgpl-2.1
328248057f5aba885cea174d9126fd5a12e13d7c
0
enaeseth/loki,enaeseth/loki,enaeseth/loki
/** * Declares instance variables. * * @constructor * * @class A class for helping insert an hr. Contains code * common to both the button and the menu item. */ UI.HR_Helper = function() { var self = this; Util.OOP.inherits(self, UI.Helper); this.init = function(loki) { this.superclass.init.call(this, loki); this._masseuse = (new UI.HR_Masseuse).init(this._loki); return this; }; this.is_selected = function() { return !!_get_selected_hr(); }; var _get_selected_hr = function() { var sel = Util.Selection.get_selection(self._loki.window); var rng = Util.Range.create_range(sel); return Util.Range.get_nearest_ancestor_element_by_tag_name(rng, 'HR'); }; this.insert_hr = function() { var sel = Util.Selection.get_selection(self._loki.window); var hr = self._loki.document.createElement('HR'); Util.Selection.paste_node(sel, self._masseuse.wrap(hr)); //Util.Selection.select_node(sel, hr); //Util.Selection.collapse(sel, false); window.focus(); self._loki.window.focus(); }; this.remove_hr = function() { var sel = Util.Selection.get_selection(self._loki.window); var rng = Util.Range.create_range(sel); var hr = Util.Range.get_nearest_ancestor_element_by_tag_name(rng, 'HR'); var target = self._removal_target(hr); // Move cursor Util.Selection.select_node(sel, target); Util.Selection.collapse(sel, false); // to end self._loki.window.focus(); if ( target.parentNode != null ) target.parentNode.removeChild(target); }; this._removal_target = function(hr) { var p = hr.parentNode; return (Util.Node.is_tag(p, 'DIV') && 'hr' == p.getAttribute('loki:container')) ? p : hr; }; };
js/UI.HR_Helper.js
/** * Declares instance variables. * * @constructor * * @class A class for helping insert an hr. Contains code * common to both the button and the menu item. */ UI.HR_Helper = function() { var self = this; Util.OOP.inherits(self, UI.Helper); this.init = function(loki) { this.superclass.init.call(this, loki); this._masseuse = (new UI.HR_Masseuse).init(this._loki); return this; }; this.is_selected = function() { return self._get_selected_hr() != null; }; var _get_selected_hr = function() { var sel = Util.Selection.get_selection(self._loki.window); var rng = Util.Range.create_range(sel); return Util.Range.get_nearest_ancestor_element_by_tag_name(rng, 'HR'); }; this.insert_hr = function() { var sel = Util.Selection.get_selection(self._loki.window); var hr = self._loki.document.createElement('HR'); Util.Selection.paste_node(sel, self._masseuse.wrap(hr)); //Util.Selection.select_node(sel, hr); //Util.Selection.collapse(sel, false); window.focus(); self._loki.window.focus(); }; this.remove_hr = function() { var sel = Util.Selection.get_selection(self._loki.window); var rng = Util.Range.create_range(sel); var hr = Util.Range.get_nearest_ancestor_element_by_tag_name(rng, 'HR'); var target = self._removal_target(hr); // Move cursor Util.Selection.select_node(sel, target); Util.Selection.collapse(sel, false); // to end self._loki.window.focus(); if ( target.parentNode != null ) target.parentNode.removeChild(target); }; this._removal_target = function(hr) { var p = hr.parentNode; return (Util.Node.is_tag(p, 'DIV') && 'hr' == p.getAttribute('loki:container')) ? p : hr; }; };
Fixed a bug in the HR helper where it thought a function was declared on the object when it was actually in the constructor's closure.
js/UI.HR_Helper.js
Fixed a bug in the HR helper where it thought a function was declared on the object when it was actually in the constructor's closure.
<ide><path>s/UI.HR_Helper.js <ide> <ide> this.is_selected = function() <ide> { <del> return self._get_selected_hr() != null; <add> return !!_get_selected_hr(); <ide> }; <ide> <ide> var _get_selected_hr = function()
Java
mit
error: pathspec 'src/fr/loicdelorme/followUpYourGarden/core/services/exceptions/MissingGroupOfPlantsTypesOfPlantsException.java' did not match any file(s) known to git
93249562461531400233392aa321d7779e3befae
1
LoicDelorme/followUpYourGarden,LoicDelorme/Follow-Up-Your-Garden
package fr.loicdelorme.followUpYourGarden.core.services.exceptions; /** * This exception is thrown if the typesOfPlants attribute is missing. * * @author DELORME Loïc * @version 1.0.0 */ @SuppressWarnings("serial") public class MissingGroupOfPlantsTypesOfPlantsException extends Exception { /** * This exception is built when the types of plants attribute is missing. */ public MissingGroupOfPlantsTypesOfPlantsException() { super("The types of plants is missing!"); } }
src/fr/loicdelorme/followUpYourGarden/core/services/exceptions/MissingGroupOfPlantsTypesOfPlantsException.java
[add] exception.
src/fr/loicdelorme/followUpYourGarden/core/services/exceptions/MissingGroupOfPlantsTypesOfPlantsException.java
[add] exception.
<ide><path>rc/fr/loicdelorme/followUpYourGarden/core/services/exceptions/MissingGroupOfPlantsTypesOfPlantsException.java <add>package fr.loicdelorme.followUpYourGarden.core.services.exceptions; <add> <add>/** <add> * This exception is thrown if the typesOfPlants attribute is missing. <add> * <add> * @author DELORME Loïc <add> * @version 1.0.0 <add> */ <add>@SuppressWarnings("serial") <add>public class MissingGroupOfPlantsTypesOfPlantsException extends Exception <add>{ <add> /** <add> * This exception is built when the types of plants attribute is missing. <add> */ <add> public MissingGroupOfPlantsTypesOfPlantsException() <add> { <add> super("The types of plants is missing!"); <add> } <add>}
Java
apache-2.0
6d21c1e7a983cfbb519ab192f67ded30370e43b9
0
psiinon/zaproxy,zaproxy/zaproxy,Ali-Razmjoo/zaproxy,thc202/zaproxy,psiinon/zaproxy,psiinon/zaproxy,meitar/zaproxy,Ali-Razmjoo/zaproxy,meitar/zaproxy,gmaran23/zaproxy,zaproxy/zaproxy,meitar/zaproxy,gmaran23/zaproxy,kingthorin/zaproxy,zaproxy/zaproxy,Ali-Razmjoo/zaproxy,thc202/zaproxy,meitar/zaproxy,zaproxy/zaproxy,Ali-Razmjoo/zaproxy,Ali-Razmjoo/zaproxy,psiinon/zaproxy,thc202/zaproxy,zaproxy/zaproxy,gmaran23/zaproxy,meitar/zaproxy,zaproxy/zaproxy,gmaran23/zaproxy,meitar/zaproxy,kingthorin/zaproxy,kingthorin/zaproxy,zaproxy/zaproxy,thc202/zaproxy,thc202/zaproxy,kingthorin/zaproxy,thc202/zaproxy,psiinon/zaproxy,meitar/zaproxy,gmaran23/zaproxy,kingthorin/zaproxy,gmaran23/zaproxy,kingthorin/zaproxy,psiinon/zaproxy,thc202/zaproxy,Ali-Razmjoo/zaproxy,meitar/zaproxy,psiinon/zaproxy,kingthorin/zaproxy,Ali-Razmjoo/zaproxy,gmaran23/zaproxy
/* * Created on May 30, 2004 * * Paros and its related class files. * * Paros is an HTTP/HTTPS proxy for assessing web application security. * Copyright (C) 2003-2004 Chinotec Technologies Company * * This program is free software; you can redistribute it and/or * modify it under the terms of the Clarified Artistic License * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * Clarified Artistic License for more details. * * You should have received a copy of the Clarified Artistic License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ // ZAP: 2011/05/15 Support for exclusions // ZAP: 2012/11/01 Issue 411: Allow proxy port to be specified on the command line // ZAP: 2012/12/27 Added method addPersistentConnectionListener(...) // ZAP: 2013/01/25 Added method removeProxyListener() // ZAP: 2013/08/30 Issue 775: Allow host to be set via the command line // ZAP: 2014/03/23 Issue 1022: Proxy - Allow to override a proxied message // ZAP: 2015/01/04 Issue 1387: Unable to change the proxy's port/address if the port/address was // specified through the command line // ZAP: 2015/11/04 Issue 1920: Report the host:port ZAP is listening on in daemon mode, or exit if // it cant // ZAP: 2016/05/30 Issue 2494: ZAP Proxy is not showing the HTTP CONNECT Request in history tab // ZAP: 2017/03/15 Enable API // ZAP: 2017/11/06 Removed ProxyServerSSL (Issue 3983) // ZAP: 2019/06/01 Normalise line endings. // ZAP: 2019/06/05 Normalise format/style. // ZAP: 2019/09/17 Remove irrelevant conditional in stopServer(). 
package org.parosproxy.paros.control; import java.util.List; import org.parosproxy.paros.core.proxy.CacheProcessingItem; import org.parosproxy.paros.core.proxy.ConnectRequestProxyListener; import org.parosproxy.paros.core.proxy.OverrideMessageProxyListener; import org.parosproxy.paros.core.proxy.ProxyListener; import org.parosproxy.paros.core.proxy.ProxyServer; import org.parosproxy.paros.model.Model; import org.zaproxy.zap.PersistentConnectionListener; import org.zaproxy.zap.control.ControlOverrides; public class Proxy { private Model model = null; private ProxyServer proxyServer = null; private boolean reverseProxy = false; private String reverseProxyHost = ""; private ControlOverrides overrides = null; public Proxy(Model model, ControlOverrides overrides) { this.model = model; this.overrides = overrides; proxyServer = new ProxyServer(); proxyServer.setEnableApi(true); } public boolean startServer() { // setProxyParam put in here so restart can reread param. proxyServer.setProxyParam(model.getOptionsParam().getProxyParam()); proxyServer.setConnectionParam(model.getOptionsParam().getConnectionParam()); if (model.getOptionsParam().getProxyParam().isUseReverseProxy()) { proxyServer.startServer( model.getOptionsParam().getProxyParam().getReverseProxyIp(), model.getOptionsParam().getProxyParam().getReverseProxyHttpPort(), false); } else { String proxyHost = null; int proxyPort = -1; if (this.overrides != null) { proxyHost = this.overrides.getProxyHost(); proxyPort = this.overrides.getProxyPort(); // Use overrides once. 
overrides = null; } if (proxyHost != null) { // Save the override in the configs model.getOptionsParam().getProxyParam().setProxyIp(proxyHost); } else { // ZAP: get the proxy IP as set without any check for nullable proxyHost = model.getOptionsParam().getProxyParam().getRawProxyIP(); } if (proxyPort > 0) { // Save the override in the configs model.getOptionsParam().getProxyParam().setProxyPort(proxyPort); } else { proxyPort = model.getOptionsParam().getProxyParam().getProxyPort(); } if (proxyServer.startServer(proxyHost, proxyPort, false) == -1) { return false; } } return true; } public void stopServer() { proxyServer.stopServer(); } public void setSerialize(boolean serialize) { proxyServer.setSerialize(serialize); } public void addProxyListener(ProxyListener listener) { proxyServer.addProxyListener(listener); } public void removeProxyListener(ProxyListener listener) { proxyServer.removeProxyListener(listener); } public void addOverrideMessageProxyListener(OverrideMessageProxyListener listener) { proxyServer.addOverrideMessageProxyListener(listener); } public void removeOverrideMessageProxyListener(OverrideMessageProxyListener listener) { proxyServer.removeOverrideMessageProxyListener(listener); } public void addPersistentConnectionListener(PersistentConnectionListener listener) { proxyServer.addPersistentConnectionListener(listener); } public void removePersistentConnectionListener(PersistentConnectionListener listener) { proxyServer.removePersistentConnectionListener(listener); } /** * Adds the given {@code listener}, that will be notified of the received CONNECT requests. * * @param listener the listener that will be added * @throws IllegalArgumentException if the given {@code listener} is {@code null}. 
* @since 2.5.0 */ public void addConnectRequestProxyListener(ConnectRequestProxyListener listener) { validateListenerNotNull(listener); proxyServer.addConnectRequestProxyListener(listener); } /** * Validates that the given {@code listener} is not {@code null}, throwing an {@code * IllegalArgumentException} if it is. * * @param listener the listener that will be validated * @throws IllegalArgumentException if the given {@code listener} is {@code null}. */ private static void validateListenerNotNull(Object listener) { if (listener == null) { throw new IllegalArgumentException("Parameter listener must not be null."); } } /** * Removes the given {@code listener}, to no longer be notified of the received CONNECT * requests. * * @param listener the listener that should be removed * @throws IllegalArgumentException if the given {@code listener} is {@code null}. * @since 2.5.0 */ public void removeConnectRequestProxyListener(ConnectRequestProxyListener listener) { validateListenerNotNull(listener); proxyServer.removeConnectRequestProxyListener(listener); } /** @return Returns the reverseProxy. */ public boolean isReverseProxy() { return reverseProxy; } /** @param reverseProxy The reverseProxy to set. */ public void setReverseProxy(boolean reverseProxy) { this.reverseProxy = reverseProxy; } /** @return Returns the reverseProxyHost. */ public String getReverseProxyHost() { return reverseProxyHost; } /** @param reverseProxyHost The reverseProxyHost to set. */ public void setReverseProxyHost(String reverseProxyHost) { this.reverseProxyHost = reverseProxyHost; } /** @param enableCacheProcessing The enableCacheProcessing to set. 
*/ public void setEnableCacheProcessing(boolean enableCacheProcessing) { if (proxyServer != null) { proxyServer.setEnableCacheProcessing(enableCacheProcessing); } } public void addCacheProcessingList(CacheProcessingItem item) { if (proxyServer != null) { proxyServer.addCacheProcessingList(item); } } public void setIgnoreList(List<String> urls) { if (proxyServer != null) { proxyServer.setExcludeList(urls); } } }
zap/src/main/java/org/parosproxy/paros/control/Proxy.java
/* * Created on May 30, 2004 * * Paros and its related class files. * * Paros is an HTTP/HTTPS proxy for assessing web application security. * Copyright (C) 2003-2004 Chinotec Technologies Company * * This program is free software; you can redistribute it and/or * modify it under the terms of the Clarified Artistic License * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * Clarified Artistic License for more details. * * You should have received a copy of the Clarified Artistic License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ // ZAP: 2011/05/15 Support for exclusions // ZAP: 2012/11/01 Issue 411: Allow proxy port to be specified on the command line // ZAP: 2012/12/27 Added method addPersistentConnectionListener(...) // ZAP: 2013/01/25 Added method removeProxyListener() // ZAP: 2013/08/30 Issue 775: Allow host to be set via the command line // ZAP: 2014/03/23 Issue 1022: Proxy - Allow to override a proxied message // ZAP: 2015/01/04 Issue 1387: Unable to change the proxy's port/address if the port/address was // specified through the command line // ZAP: 2015/11/04 Issue 1920: Report the host:port ZAP is listening on in daemon mode, or exit if // it cant // ZAP: 2016/05/30 Issue 2494: ZAP Proxy is not showing the HTTP CONNECT Request in history tab // ZAP: 2017/03/15 Enable API // ZAP: 2017/11/06 Removed ProxyServerSSL (Issue 3983) // ZAP: 2019/06/01 Normalise line endings. // ZAP: 2019/06/05 Normalise format/style. 
package org.parosproxy.paros.control; import java.util.List; import org.parosproxy.paros.core.proxy.CacheProcessingItem; import org.parosproxy.paros.core.proxy.ConnectRequestProxyListener; import org.parosproxy.paros.core.proxy.OverrideMessageProxyListener; import org.parosproxy.paros.core.proxy.ProxyListener; import org.parosproxy.paros.core.proxy.ProxyServer; import org.parosproxy.paros.model.Model; import org.zaproxy.zap.PersistentConnectionListener; import org.zaproxy.zap.control.ControlOverrides; public class Proxy { private Model model = null; private ProxyServer proxyServer = null; private boolean reverseProxy = false; private String reverseProxyHost = ""; private ControlOverrides overrides = null; public Proxy(Model model, ControlOverrides overrides) { this.model = model; this.overrides = overrides; proxyServer = new ProxyServer(); proxyServer.setEnableApi(true); } public boolean startServer() { // setProxyParam put in here so restart can reread param. proxyServer.setProxyParam(model.getOptionsParam().getProxyParam()); proxyServer.setConnectionParam(model.getOptionsParam().getConnectionParam()); if (model.getOptionsParam().getProxyParam().isUseReverseProxy()) { proxyServer.startServer( model.getOptionsParam().getProxyParam().getReverseProxyIp(), model.getOptionsParam().getProxyParam().getReverseProxyHttpPort(), false); } else { String proxyHost = null; int proxyPort = -1; if (this.overrides != null) { proxyHost = this.overrides.getProxyHost(); proxyPort = this.overrides.getProxyPort(); // Use overrides once. 
overrides = null; } if (proxyHost != null) { // Save the override in the configs model.getOptionsParam().getProxyParam().setProxyIp(proxyHost); } else { // ZAP: get the proxy IP as set without any check for nullable proxyHost = model.getOptionsParam().getProxyParam().getRawProxyIP(); } if (proxyPort > 0) { // Save the override in the configs model.getOptionsParam().getProxyParam().setProxyPort(proxyPort); } else { proxyPort = model.getOptionsParam().getProxyParam().getProxyPort(); } if (proxyServer.startServer(proxyHost, proxyPort, false) == -1) { return false; } } return true; } public void stopServer() { if (model.getOptionsParam().getProxyParam().isUseReverseProxy()) { proxyServer.stopServer(); } else { proxyServer.stopServer(); } } public void setSerialize(boolean serialize) { proxyServer.setSerialize(serialize); } public void addProxyListener(ProxyListener listener) { proxyServer.addProxyListener(listener); } public void removeProxyListener(ProxyListener listener) { proxyServer.removeProxyListener(listener); } public void addOverrideMessageProxyListener(OverrideMessageProxyListener listener) { proxyServer.addOverrideMessageProxyListener(listener); } public void removeOverrideMessageProxyListener(OverrideMessageProxyListener listener) { proxyServer.removeOverrideMessageProxyListener(listener); } public void addPersistentConnectionListener(PersistentConnectionListener listener) { proxyServer.addPersistentConnectionListener(listener); } public void removePersistentConnectionListener(PersistentConnectionListener listener) { proxyServer.removePersistentConnectionListener(listener); } /** * Adds the given {@code listener}, that will be notified of the received CONNECT requests. * * @param listener the listener that will be added * @throws IllegalArgumentException if the given {@code listener} is {@code null}. 
* @since 2.5.0 */ public void addConnectRequestProxyListener(ConnectRequestProxyListener listener) { validateListenerNotNull(listener); proxyServer.addConnectRequestProxyListener(listener); } /** * Validates that the given {@code listener} is not {@code null}, throwing an {@code * IllegalArgumentException} if it is. * * @param listener the listener that will be validated * @throws IllegalArgumentException if the given {@code listener} is {@code null}. */ private static void validateListenerNotNull(Object listener) { if (listener == null) { throw new IllegalArgumentException("Parameter listener must not be null."); } } /** * Removes the given {@code listener}, to no longer be notified of the received CONNECT * requests. * * @param listener the listener that should be removed * @throws IllegalArgumentException if the given {@code listener} is {@code null}. * @since 2.5.0 */ public void removeConnectRequestProxyListener(ConnectRequestProxyListener listener) { validateListenerNotNull(listener); proxyServer.removeConnectRequestProxyListener(listener); } /** @return Returns the reverseProxy. */ public boolean isReverseProxy() { return reverseProxy; } /** @param reverseProxy The reverseProxy to set. */ public void setReverseProxy(boolean reverseProxy) { this.reverseProxy = reverseProxy; } /** @return Returns the reverseProxyHost. */ public String getReverseProxyHost() { return reverseProxyHost; } /** @param reverseProxyHost The reverseProxyHost to set. */ public void setReverseProxyHost(String reverseProxyHost) { this.reverseProxyHost = reverseProxyHost; } /** @param enableCacheProcessing The enableCacheProcessing to set. 
*/ public void setEnableCacheProcessing(boolean enableCacheProcessing) { if (proxyServer != null) { proxyServer.setEnableCacheProcessing(enableCacheProcessing); } } public void addCacheProcessingList(CacheProcessingItem item) { if (proxyServer != null) { proxyServer.addCacheProcessingList(item); } } public void setIgnoreList(List<String> urls) { if (proxyServer != null) { proxyServer.setExcludeList(urls); } } }
Simplify Proxy.stopServer() Remove irrelevant conditional identified by sonarcloud (https://sonarcloud.io/project/issues?id=zaproxy_zaproxy&open=AW06pHPtwvaFA2N5WdJi&resolved=false&types=BUG) Signed-off-by: kingthorin <83ea41af2b636790c02743e48206546e1fa891e4@users.noreply.github.com> Signed-off-by: karan preet singh sasan <[email protected]>
zap/src/main/java/org/parosproxy/paros/control/Proxy.java
Simplify Proxy.stopServer()
<ide><path>ap/src/main/java/org/parosproxy/paros/control/Proxy.java <ide> // ZAP: 2017/11/06 Removed ProxyServerSSL (Issue 3983) <ide> // ZAP: 2019/06/01 Normalise line endings. <ide> // ZAP: 2019/06/05 Normalise format/style. <add>// ZAP: 2019/09/17 Remove irrelevant conditional in stopServer(). <ide> package org.parosproxy.paros.control; <ide> <ide> import java.util.List; <ide> } <ide> <ide> public void stopServer() { <del> if (model.getOptionsParam().getProxyParam().isUseReverseProxy()) { <del> proxyServer.stopServer(); <del> <del> } else { <del> proxyServer.stopServer(); <del> } <add> proxyServer.stopServer(); <ide> } <ide> <ide> public void setSerialize(boolean serialize) {
Java
apache-2.0
2244d947cbedaf43259a847263dc852cd618be85
0
mikepenz/FastAdapter,mikepenz/FastAdapter,mikepenz/FastAdapter,mikepenz/FastAdapter
package com.mikepenz.fastadapter; import android.os.Bundle; import android.support.v4.util.ArrayMap; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.util.SparseIntArray; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import com.mikepenz.fastadapter.utils.AdapterUtil; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; /** * Created by mikepenz on 27.12.15. */ public class FastAdapter<Item extends IItem> extends RecyclerView.Adapter<RecyclerView.ViewHolder> { protected static final String BUNDLE_SELECTIONS = "bundle_selections"; protected static final String BUNDLE_EXPANDED = "bundle_expanded"; // we remember all adapters //priority queue... final private ArrayMap<Integer, IAdapter<Item>> mAdapters = new ArrayMap<>(); // we remember all possible types so we can create a new view efficiently final private ArrayMap<Integer, Item> mTypeInstances = new ArrayMap<>(); // cache the sizes of the different adapters so we can access the items more performant final private NavigableMap<Integer, IAdapter<Item>> mAdapterSizes = new TreeMap<>(); // the total size private int mGlobalSize = 0; // if enabled we will select the item via a notifyItemChanged -> will animate with the Animator // you can also use this if you have any custom logic for selections, and do not depend on the "selected" state of the view // note if enabled it will feel a bit slower because it will animate the selection private boolean mSelectWithItemUpdate = false; // if we want multiSelect enabled private boolean mMultiSelect = false; // if we want the multiSelect only on longClick private boolean mSelectOnLongClick = false; // if a user can deselect a selection via click. 
required if there is always one selected item! private boolean mAllowDeselection = true; // if items are selectable in general private boolean mSelectable = false; // only one expanded section private boolean mOnlyOneExpandedItem = false; // if we use the positionBasedStateManagement or the "stateless" managment private boolean mPositionBasedStateManagement = false; // we need to remember all selections to recreate them after orientation change private SortedSet<Integer> mSelections = new TreeSet<>(); // we need to remember all expanded items to recreate them after orientation change private SparseIntArray mExpanded = new SparseIntArray(); // the listeners which can be hooked on an item private OnClickListener<Item> mOnPreClickListener; private OnClickListener<Item> mOnClickListener; private OnLongClickListener<Item> mOnPreLongClickListener; private OnLongClickListener<Item> mOnLongClickListener; private OnTouchListener<Item> mOnTouchListener; //the listeners for onCreateViewHolder or onBindViewHolder private OnCreateViewHolderListener mOnCreateViewHolderListener = new OnCreateViewHolderListenerImpl(); private OnBindViewHolderListener mOnBindViewHolderListener = new OnBindViewHolderListenerImpl(); /** * default CTOR */ public FastAdapter() { setHasStableIds(true); } /** * Define the OnClickListener which will be used for a single item * * @param onClickListener the OnClickListener which will be used for a single item * @return this */ public FastAdapter<Item> withOnClickListener(OnClickListener<Item> onClickListener) { this.mOnClickListener = onClickListener; return this; } /** * Define the OnPreClickListener which will be used for a single item and is called after all internal methods are done * * @param onPreClickListener the OnPreClickListener which will be called after a single item was clicked and all internal methods are done * @return this */ public FastAdapter<Item> withOnPreClickListener(OnClickListener<Item> onPreClickListener) { this.mOnPreClickListener 
= onPreClickListener; return this; } /** * Define the OnLongClickListener which will be used for a single item * * @param onLongClickListener the OnLongClickListener which will be used for a single item * @return this */ public FastAdapter<Item> withOnLongClickListener(OnLongClickListener<Item> onLongClickListener) { this.mOnLongClickListener = onLongClickListener; return this; } /** * Define the OnLongClickListener which will be used for a single item and is called after all internal methods are done * * @param onPreLongClickListener the OnLongClickListener which will be called after a single item was clicked and all internal methods are done * @return this */ public FastAdapter<Item> withOnPreLongClickListener(OnLongClickListener<Item> onPreLongClickListener) { this.mOnPreLongClickListener = onPreLongClickListener; return this; } /** * Define the TouchListener which will be used for a single item * * @param onTouchListener the TouchListener which will be used for a single item * @return this */ public FastAdapter<Item> withOnTouchListener(OnTouchListener<Item> onTouchListener) { this.mOnTouchListener = onTouchListener; return this; } /** * allows you to set a custom OnCreateViewHolderListener which will be used before and after the ViewHolder is created * You may check the OnCreateViewHolderListenerImpl for the default behavior * * @param onCreateViewHolderListener the OnCreateViewHolderListener (you may use the OnCreateViewHolderListenerImpl) */ public FastAdapter<Item> withOnCreateViewHolderListener(OnCreateViewHolderListener onCreateViewHolderListener) { this.mOnCreateViewHolderListener = onCreateViewHolderListener; return this; } /** * allows you to set an custom OnBindViewHolderListener which is used to bind the view. This will overwrite the libraries behavior. 
* You may check the OnBindViewHolderListenerImpl for the default behavior * * @param onBindViewHolderListener the OnBindViewHolderListener */ public FastAdapter<Item> withOnBindViewHolderListener(OnBindViewHolderListener onBindViewHolderListener) { this.mOnBindViewHolderListener = onBindViewHolderListener; return this; } /** * select between the different selection behaviors. * there are now 2 different variants of selection. you can toggle this via `withSelectWithItemUpdate(boolean)` (where false == default - variant 1) * 1.) direct selection via the view "selected" state, we also make sure we do not animate here so no notifyItemChanged is called if we repeatly press the same item * 2.) we select the items via a notifyItemChanged. this will allow custom selected logics within your views (isSelected() - do something...) and it will also animate the change via the provided itemAnimator. because of the animation of the itemAnimator the selection will have a small delay (time of animating) * * @param selectWithItemUpdate true if notifyItemChanged should be called upon select * @return this */ public FastAdapter<Item> withSelectWithItemUpdate(boolean selectWithItemUpdate) { this.mSelectWithItemUpdate = selectWithItemUpdate; return this; } /** * Enable this if you want multiSelection possible in the list * * @param multiSelect true to enable multiSelect * @return this */ public FastAdapter<Item> withMultiSelect(boolean multiSelect) { mMultiSelect = multiSelect; return this; } /** * Disable this if you want the selection on a single tap * * @param selectOnLongClick false to do select via single click * @return this */ public FastAdapter<Item> withSelectOnLongClick(boolean selectOnLongClick) { mSelectOnLongClick = selectOnLongClick; return this; } /** * If false, a user can't deselect an item via click (you can still do this programmatically) * * @param allowDeselection true if a user can deselect an already selected item via click * @return this */ public 
FastAdapter<Item> withAllowDeselection(boolean allowDeselection) { this.mAllowDeselection = allowDeselection; return this; } /** * set if no item is selectable * * @param selectable true if items are selectable * @return this */ public FastAdapter<Item> withSelectable(boolean selectable) { this.mSelectable = selectable; return this; } /** * set if we want to use the positionBasedStateManagement (high performant for lists up to Integer.MAX_INT) * set to false if you want to use the new stateManagement which will come with more flexibility (but worse performance on long lists) * * @param mPositionBasedStateManagement false to enable the alternative "stateLess" stateManagement * @return this */ public FastAdapter<Item> withPositionBasedStateManagement(boolean mPositionBasedStateManagement) { this.mPositionBasedStateManagement = mPositionBasedStateManagement; return this; } /** * @return if items are selectable */ public boolean isSelectable() { return mSelectable; } /** * @return if this FastAdapter is configured with the PositionBasedStateManagement */ public boolean isPositionBasedStateManagement() { return mPositionBasedStateManagement; } /** * set if there should only be one opened expandable item * DEFAULT: false * * @param mOnlyOneExpandedItem true if there should be only one expanded, expandable item in the list * @return this */ public FastAdapter<Item> withOnlyOneExpandedItem(boolean mOnlyOneExpandedItem) { this.mOnlyOneExpandedItem = mOnlyOneExpandedItem; return this; } /** * @return if there should be only one expanded, expandable item in the list */ public boolean isOnlyOneExpandedItem() { return mOnlyOneExpandedItem; } /** * re-selects all elements stored in the savedInstanceState * IMPORTANT! Call this method only after all items where added to the adapters again. Otherwise it may select wrong items! 
* * @param savedInstanceState If the activity is being re-initialized after * previously being shut down then this Bundle contains the data it most * recently supplied in Note: Otherwise it is null. * @return this */ public FastAdapter<Item> withSavedInstanceState(Bundle savedInstanceState) { return withSavedInstanceState(savedInstanceState, ""); } /** * re-selects all elements stored in the savedInstanceState * IMPORTANT! Call this method only after all items where added to the adapters again. Otherwise it may select wrong items! * * @param savedInstanceState If the activity is being re-initialized after * previously being shut down then this Bundle contains the data it most * recently supplied in Note: Otherwise it is null. * @param prefix a prefix added to the savedInstance key so we can store multiple states * @return this */ public FastAdapter<Item> withSavedInstanceState(Bundle savedInstanceState, String prefix) { if (savedInstanceState != null) { //make sure already done selections are removed deselect(); if (mPositionBasedStateManagement) { //first restore opened collasable items, as otherwise may not all selections could be restored int[] expandedItems = savedInstanceState.getIntArray(BUNDLE_EXPANDED + prefix); if (expandedItems != null) { for (Integer expandedItem : expandedItems) { expand(expandedItem); } } //restore the selections int[] selections = savedInstanceState.getIntArray(BUNDLE_SELECTIONS + prefix); if (selections != null) { for (Integer selection : selections) { select(selection); } } } else { ArrayList<String> expandedItems = savedInstanceState.getStringArrayList(BUNDLE_EXPANDED + prefix); ArrayList<String> selectedItems = savedInstanceState.getStringArrayList(BUNDLE_SELECTIONS + prefix); for (int i = 0; i < getItemCount(); i++) { Item item = getItem(i); String id = String.valueOf(item.getIdentifier()); if (expandedItems != null && expandedItems.contains(id)) { expand(i); } if (selectedItems != null && selectedItems.contains(id)) { select(i); 
} //we also have to restore the selections for subItems AdapterUtil.restoreSubItemSelectionStatesForAlternativeStateManagement(item, selectedItems); } } } return this; } /** * registers an AbstractAdapter which will be hooked into the adapter chain * * @param adapter an adapter which extends the AbstractAdapter */ public <A extends AbstractAdapter<Item>> void registerAdapter(A adapter) { if (!mAdapters.containsKey(adapter.getOrder())) { mAdapters.put(adapter.getOrder(), adapter); cacheSizes(); } } /** * register a new type into the TypeInstances to be able to efficiently create thew ViewHolders * * @param item an IItem which will be shown in the list */ public void registerTypeInstance(Item item) { if (!mTypeInstances.containsKey(item.getType())) { mTypeInstances.put(item.getType(), item); } } /** * gets the TypeInstance remembered within the FastAdapter for an item * * @param type the int type of the item * @return the Item typeInstance */ public Item getTypeInstance(int type) { return mTypeInstances.get(type); } /** * Creates the ViewHolder by the viewType * * @param parent the parent view (the RecyclerView) * @param viewType the current viewType which is bound * @return the ViewHolder with the bound data */ @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { final RecyclerView.ViewHolder holder = mOnCreateViewHolderListener.onPreCreateViewHolder(parent, viewType); //handle click behavior holder.itemView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { int pos = holder.getAdapterPosition(); if (pos != RecyclerView.NO_POSITION) { boolean consumed = false; RelativeInfo<Item> relativeInfo = getRelativeInfo(pos); Item item = relativeInfo.item; if (item != null && item.isEnabled()) { //on the very first we call the click listener from the item itself (if defined) if (item instanceof IClickable && ((IClickable) item).getOnPreItemClickListener() != null) { consumed = ((IClickable<Item>) 
item).getOnPreItemClickListener().onClick(v, relativeInfo.adapter, item, pos); } //first call the onPreClickListener which would allow to prevent executing of any following code, including selection if (!consumed && mOnPreClickListener != null) { consumed = mOnPreClickListener.onClick(v, relativeInfo.adapter, item, pos); } //if this is a expandable item :D if (!consumed && item instanceof IExpandable) { if (((IExpandable) item).getSubItems() != null) { toggleExpandable(pos); } } //if there should be only one expanded item we want to collapse all the others but the current one if (mOnlyOneExpandedItem) { int[] expandedItems = getExpandedItems(); for (int i = expandedItems.length - 1; i >= 0; i--) { if (expandedItems[i] != pos) { collapse(expandedItems[i], true); } } } //handle the selection if the event was not yet consumed, and we are allowed to select an item (only occurs when we select with long click only) if (!consumed && !mSelectOnLongClick && mSelectable) { handleSelection(v, item, pos); } //before calling the global adapter onClick listener call the item specific onClickListener if (item instanceof IClickable && ((IClickable) item).getOnItemClickListener() != null) { consumed = ((IClickable<Item>) item).getOnItemClickListener().onClick(v, relativeInfo.adapter, item, pos); } //call the normal click listener after selection was handlded if (!consumed && mOnClickListener != null) { mOnClickListener.onClick(v, relativeInfo.adapter, item, pos); } } } } }); //handle long click behavior holder.itemView.setOnLongClickListener(new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { int pos = holder.getAdapterPosition(); if (pos != RecyclerView.NO_POSITION) { boolean consumed = false; RelativeInfo<Item> relativeInfo = getRelativeInfo(pos); if (relativeInfo.item != null && relativeInfo.item.isEnabled()) { //first call the OnPreLongClickListener which would allow to prevent executing of any following code, including selection if 
(mOnPreLongClickListener != null) { consumed = mOnPreLongClickListener.onLongClick(v, relativeInfo.adapter, relativeInfo.item, pos); } //now handle the selection if we are in multiSelect mode and allow selecting on longClick if (!consumed && mSelectOnLongClick && mSelectable) { handleSelection(v, relativeInfo.item, pos); } //call the normal long click listener after selection was handled if (mOnLongClickListener != null) { consumed = mOnLongClickListener.onLongClick(v, relativeInfo.adapter, relativeInfo.item, pos); } } return consumed; } return false; } }); //handle touch behavior holder.itemView.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { if (mOnTouchListener != null) { int pos = holder.getAdapterPosition(); if (pos != RecyclerView.NO_POSITION) { RelativeInfo<Item> relativeInfo = getRelativeInfo(pos); return mOnTouchListener.onTouch(v, event, relativeInfo.adapter, relativeInfo.item, pos); } } return false; } }); return mOnCreateViewHolderListener.onPostCreateViewHolder(holder); } /** * Binds the data to the created ViewHolder and sets the listeners to the holder.itemView * * @param holder the viewHolder we bind the data on * @param position the global position */ @Override public void onBindViewHolder(final RecyclerView.ViewHolder holder, int position) { mOnBindViewHolderListener.onBindViewHolder(holder, position); } /** * Searches for the given item and calculates it's global position * * @param item the item which is searched for * @return the global position, or -1 if not found */ public int getPosition(Item item) { if (item.getIdentifier() == -1) { Log.e("FastAdapter", "You have to define an identifier for your item to retrieve the position via this method"); return -1; } int position = 0; int length = mAdapters.size(); for (int i = 0; i < length; i++) { IAdapter<Item> adapter = mAdapters.valueAt(i); if (adapter.getOrder() < 0) { continue; } int relativePosition = 
adapter.getAdapterPosition(item); if (relativePosition != -1) { return position + relativePosition; } position = adapter.getAdapterItemCount(); } return -1; } /** * gets the IItem by a position, from all registered adapters * * @param position the global position * @return the found IItem or null */ public Item getItem(int position) { //if we are out of range just return null if (position < 0 || position >= mGlobalSize) { return null; } //now get the adapter which is responsible for the given position Map.Entry<Integer, IAdapter<Item>> entry = mAdapterSizes.floorEntry(position); return entry.getValue().getAdapterItem(position - entry.getKey()); } /** * Internal method to get the Item as ItemHolder which comes with the relative position within it's adapter * Finds the responsible adapter for the given position * * @param position the global position * @return the adapter which is responsible for this position */ public RelativeInfo<Item> getRelativeInfo(int position) { if (position < 0) { return new RelativeInfo<>(); } RelativeInfo<Item> relativeInfo = new RelativeInfo<>(); Map.Entry<Integer, IAdapter<Item>> entry = mAdapterSizes.floorEntry(position); if (entry != null) { relativeInfo.item = entry.getValue().getAdapterItem(position - entry.getKey()); relativeInfo.adapter = entry.getValue(); relativeInfo.position = position; } return relativeInfo; } /** * Gets the adapter for the given position * * @param position the global position * @return the adapter responsible for this global position */ public IAdapter<Item> getAdapter(int position) { //if we are out of range just return null if (position < 0 || position >= mGlobalSize) { return null; } //now get the adapter which is responsible for the given position return mAdapterSizes.floorEntry(position).getValue(); } /** * finds the int ItemViewType from the IItem which exists at the given position * * @param position the global position * @return the viewType for this position */ @Override public int 
getItemViewType(int position) { return getItem(position).getType(); } /** * finds the int ItemId from the IItem which exists at the given position * * @param position the global position * @return the itemId for this position */ @Override public long getItemId(int position) { return getItem(position).getIdentifier(); } /** * calculates the total ItemCount over all registered adapters * * @return the global count */ public int getItemCount() { return mGlobalSize; } /** * calculates the item count up to a given (excluding this) order number * * @param order the number up to which the items are counted * @return the total count of items up to the adapter order */ public int getPreItemCountByOrder(int order) { //if we are empty just return 0 count if (mGlobalSize == 0) { return 0; } int size = 0; //count the number of items before the adapter with the given order for (IAdapter<Item> adapter : mAdapters.values()) { if (adapter.getOrder() == order) { return size; } else { size = size + adapter.getAdapterItemCount(); } } //get the count of items which are before this order return size; } /** * calculates the item count up to a given (excluding this) adapter (defined by the global position of the item) * * @param position the global position of an adapter item * @return the total count of items up to the adapter which holds the given position */ public int getPreItemCount(int position) { //if we are empty just return 0 count if (mGlobalSize == 0) { return 0; } //get the count of items which are before this order return mAdapterSizes.floorKey(position); } /** * calculates the count of expandable items before a given position * * @param from the global start position you should pass here the count of items of the previous adapters (or 0 if you want to start from the beginning) * @param position the global position * @return the count of expandable items before a given position */ public int getExpandedItemsCount(int from, int position) { int totalAddedItems = 0; if 
(mPositionBasedStateManagement) { int length = mExpanded.size(); for (int i = 0; i < length; i++) { //now we count the amount of expanded items within our range we check if (mExpanded.keyAt(i) >= from && mExpanded.keyAt(i) < position) { totalAddedItems = totalAddedItems + mExpanded.get(mExpanded.keyAt(i)); } else if (mExpanded.keyAt(i) >= position) { //we do not care about all expanded items which are outside our range break; } } } else { //first we find out how many items were added in total //also counting subItems for (int i = from; i < position; i++) { Item tmp = getItem(i); if (tmp instanceof IExpandable) { IExpandable tmpExpandable = ((IExpandable) tmp); if (tmpExpandable.getSubItems() != null && tmpExpandable.isExpanded()) { totalAddedItems = totalAddedItems + tmpExpandable.getSubItems().size(); } } } } return totalAddedItems; } /** * add the values to the bundle for saveInstanceState * * @param savedInstanceState If the activity is being re-initialized after * previously being shut down then this Bundle contains the data it most * recently supplied in Note: Otherwise it is null. * @return the passed bundle with the newly added data */ public Bundle saveInstanceState(Bundle savedInstanceState) { return saveInstanceState(savedInstanceState, ""); } /** * add the values to the bundle for saveInstanceState * * @param savedInstanceState If the activity is being re-initialized after * previously being shut down then this Bundle contains the data it most * recently supplied in Note: Otherwise it is null. 
* @param prefix a prefix added to the savedInstance key so we can store multiple states * @return the passed bundle with the newly added data */ public Bundle saveInstanceState(Bundle savedInstanceState, String prefix) { if (savedInstanceState != null) { if (mPositionBasedStateManagement) { //remember the selections int[] selections = new int[mSelections.size()]; int index = 0; for (Integer selection : mSelections) { selections[index] = selection; index++; } savedInstanceState.putIntArray(BUNDLE_SELECTIONS + prefix, selections); //remember the collapsed states savedInstanceState.putIntArray(BUNDLE_EXPANDED + prefix, getExpandedItems()); } else { ArrayList<String> selections = new ArrayList<>(); ArrayList<String> expandedItems = new ArrayList<>(); int length = getItemCount(); for (int i = 0; i < length; i++) { Item item = getItem(i); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { expandedItems.add(String.valueOf(item.getIdentifier())); } if (item.isSelected()) { selections.add(String.valueOf(item.getIdentifier())); } //we also have to find all selections in the sub hirachies AdapterUtil.findSubItemSelections(item, selections); } //remember the selections savedInstanceState.putStringArrayList(BUNDLE_SELECTIONS + prefix, selections); //remember the collapsed states savedInstanceState.putStringArrayList(BUNDLE_EXPANDED + prefix, expandedItems); } } return savedInstanceState; } /** * we cache the sizes of our adapters so get accesses are faster */ private void cacheSizes() { mAdapterSizes.clear(); int size = 0; //we also have to add this for the first adapter otherwise the floorKey method will return the wrong value if (mAdapters.size() > 0) { mAdapterSizes.put(0, mAdapters.valueAt(0)); } for (IAdapter<Item> adapter : mAdapters.values()) { if (adapter.getAdapterItemCount() > 0) { mAdapterSizes.put(size, adapter); size = size + adapter.getAdapterItemCount(); } } mGlobalSize = size; } //------------------------- //------------------------- 
//Selection stuff //------------------------- //------------------------- /** * @return a set with the global positions of all selected items */ public Set<Integer> getSelections() { if (mPositionBasedStateManagement) { return mSelections; } else { Set<Integer> selections = new HashSet<>(); int length = getItemCount(); for (int i = 0; i < length; i++) { Item item = getItem(i); if (item.isSelected()) { selections.add(i); } } return selections; } } /** * @return a set with the items which are currently selected */ public Set<Item> getSelectedItems() { Set<Item> items = new HashSet<>(); for (Integer position : getSelections()) { items.add(getItem(position)); } return items; } /** * toggles the selection of the item at the given position * * @param position the global position */ public void toggleSelection(int position) { if (mPositionBasedStateManagement) { if (mSelections.contains(position)) { deselect(position); } else { select(position); } } else { if (getItem(position).isSelected()) { deselect(position); } else { select(position); } } } /** * handles the selection and deselects item if multiSelect is disabled * * @param position the global position */ private void handleSelection(View view, Item item, int position) { //if this item is not selectable don't continue if (!item.isSelectable()) { return; } //if we have disabled deselection via click don't continue if (item.isSelected() && !mAllowDeselection) { return; } boolean selected = mSelections.contains(position); if (mSelectWithItemUpdate || view == null) { if (!mMultiSelect) { deselect(); } if (selected) { deselect(position); } else { select(position); } } else { if (!mMultiSelect) { //we have to separately handle deselection here because if we toggle the current item we do not want to deselect this first! if (mPositionBasedStateManagement) { Iterator<Integer> entries = mSelections.iterator(); while (entries.hasNext()) { //deselect all but the current one! this is important! 
Integer pos = entries.next(); if (pos != position) { deselect(pos, entries); } } } else { Set<Integer> selections = getSelections(); for (int pos : selections) { if (pos != position) { deselect(pos); } } } } //we toggle the state of the view item.withSetSelected(!selected); view.setSelected(!selected); //now we make sure we remember the selection! if (mPositionBasedStateManagement) { if (selected) { if (mSelections.contains(position)) { mSelections.remove(position); } } else { mSelections.add(position); } } } } /** * selects all items at the positions in the iteratable * * @param positions the global positions to select */ public void select(Iterable<Integer> positions) { for (Integer position : positions) { select(position); } } /** * selects an item and remembers it's position in the selections list * * @param position the global position */ public void select(int position) { select(position, false); } /** * selects an item and remembers it's position in the selections list * * @param position the global position * @param fireEvent true if the onClick listener should be called */ public void select(int position, boolean fireEvent) { Item item = getItem(position); if (item != null) { item.withSetSelected(true); if (mPositionBasedStateManagement) { mSelections.add(position); } } notifyItemChanged(position); if (mOnClickListener != null && fireEvent) { mOnClickListener.onClick(null, getAdapter(position), item, position); } } /** * deselects all selections */ public void deselect() { if (mPositionBasedStateManagement) { deselect(mSelections); } else { deselect(getSelections()); } } /** * deselects all items at the positions in the iteratable * * @param positions the global positions to deselect */ public void deselect(Iterable<Integer> positions) { Iterator<Integer> entries = positions.iterator(); while (entries.hasNext()) { deselect(entries.next(), entries); } } /** * deselects an item and removes it's position in the selections list * * @param position the global 
position */ public void deselect(int position) { deselect(position, null); } /** * deselects an item and removes it's position in the selections list * also takes an iterator to remove items from the map * * @param position the global position * @param entries the iterator which is used to deselect all */ private void deselect(int position, Iterator<Integer> entries) { Item item = getItem(position); if (item != null) { item.withSetSelected(false); } if (entries == null) { if (mPositionBasedStateManagement) { if (mSelections.contains(position)) { mSelections.remove(position); } } } else { entries.remove(); } notifyItemChanged(position); } /** * deletes all current selected items * * @return a list of the IItem elements which were deleted */ public List<Item> deleteAllSelectedItems() { List<Item> deletedItems = new LinkedList<>(); //we have to re-fetch the selections array again and again as the position will change after one item is deleted if (mPositionBasedStateManagement) { Set<Integer> selections = getSelections(); while (selections.size() > 0) { Iterator<Integer> iterator = selections.iterator(); int position = iterator.next(); IAdapter adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { deletedItems.add(getItem(position)); ((IItemAdapter) adapter).remove(position); } else { iterator.remove(); } selections = getSelections(); } } else { int length = getItemCount(); for (int i = length - 1; i >= 0; i--) { RelativeInfo<Item> ri = getRelativeInfo(i); if (ri.item.isSelected()) { if (ri.adapter != null && ri.adapter instanceof IItemAdapter) { ((IItemAdapter) ri.adapter).remove(i); } } } } return deletedItems; } //------------------------- //------------------------- //Expandable stuff //------------------------- //------------------------- /** * returns the expanded items this contains position and the count of items * which are expanded by this position * * @return the expanded items */ public SparseIntArray getExpanded() { if 
(mPositionBasedStateManagement) { return mExpanded; } else { SparseIntArray expandedItems = new SparseIntArray(); int length = getItemCount(); for (int i = 0; i < length; i++) { Item item = getItem(i); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { expandedItems.put(i, ((IExpandable) item).getSubItems().size()); } } return expandedItems; } } /** * @return a set with the global positions of all expanded items */ public int[] getExpandedItems() { int[] expandedItems; if (mPositionBasedStateManagement) { int length = mExpanded.size(); expandedItems = new int[length]; for (int i = 0; i < length; i++) { expandedItems[i] = mExpanded.keyAt(i); } } else { ArrayList<Integer> expandedItemsList = new ArrayList<>(); int length = getItemCount(); for (int i = 0; i < length; i++) { Item item = getItem(i); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { expandedItemsList.add(i); } } int expandedItemsListLength = expandedItemsList.size(); expandedItems = new int[expandedItemsListLength]; for (int i = 0; i < expandedItemsListLength; i++) { expandedItems[i] = expandedItemsList.get(i); } } return expandedItems; } /** * toggles the expanded state of the given expandable item at the given position * * @param position the global position */ public void toggleExpandable(int position) { if (mPositionBasedStateManagement) { if (mExpanded.indexOfKey(position) >= 0) { collapse(position); } else { expand(position); } } else { Item item = getItem(position); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { collapse(position); } else { expand(position); } } } /** * collapses all expanded items */ public void collapse() { collapse(true); } /** * collapses all expanded items * * @param notifyItemChanged true if we need to call notifyItemChanged. 
DEFAULT: false */ public void collapse(boolean notifyItemChanged) { int[] expandedItems = getExpandedItems(); for (int i = expandedItems.length - 1; i >= 0; i--) { collapse(expandedItems[i], notifyItemChanged); } } /** * collapses (closes) the given collapsible item at the given position * * @param position the global position */ public void collapse(int position) { collapse(position, false); } /** * collapses (closes) the given collapsible item at the given position * * @param position the global position * @param notifyItemChanged true if we need to call notifyItemChanged. DEFAULT: false */ public void collapse(int position, boolean notifyItemChanged) { Item item = getItem(position); if (item != null && item instanceof IExpandable) { IExpandable expandable = (IExpandable) item; //as we now know the item we will collapse we can collapse all subitems //if this item is not already collapsed and has sub items we go on if (expandable.isExpanded() && expandable.getSubItems() != null && expandable.getSubItems().size() > 0) { if (mPositionBasedStateManagement) { //first we find out how many items were added in total int totalAddedItems = expandable.getSubItems().size(); int length = mExpanded.size(); for (int i = 0; i < length; i++) { if (mExpanded.keyAt(i) > position && mExpanded.keyAt(i) <= position + totalAddedItems) { totalAddedItems = totalAddedItems + mExpanded.get(mExpanded.keyAt(i)); } } //we will deselect starting with the lowest one Iterator<Integer> selectionsIterator = mSelections.iterator(); while (selectionsIterator.hasNext()) { Integer value = selectionsIterator.next(); if (value > position && value <= position + totalAddedItems) { deselect(value, selectionsIterator); } } //now we start to collapse them for (int i = length - 1; i >= 0; i--) { if (mExpanded.keyAt(i) > position && mExpanded.keyAt(i) <= position + totalAddedItems) { //we collapsed those items now we remove update the added items totalAddedItems = totalAddedItems - 
mExpanded.get(mExpanded.keyAt(i)); //we collapse the item internalCollapse(mExpanded.keyAt(i), notifyItemChanged); } } //we collapse our root element internalCollapse(expandable, position, notifyItemChanged); } else { //first we find out how many items were added in total //also counting subitems int totalAddedItems = expandable.getSubItems().size(); for (int i = position + 1; i < position + totalAddedItems; i++) { Item tmp = getItem(i); if (tmp instanceof IExpandable) { IExpandable tmpExpandable = ((IExpandable) tmp); if (tmpExpandable.getSubItems() != null && tmpExpandable.isExpanded()) { totalAddedItems = totalAddedItems + tmpExpandable.getSubItems().size(); } } } //why... WHY?! for (int i = position + totalAddedItems - 1; i > position; i--) { Item tmp = getItem(i); if (tmp instanceof IExpandable) { IExpandable tmpExpandable = ((IExpandable) tmp); if (tmpExpandable.isExpanded()) { collapse(i); if (tmpExpandable.getSubItems() != null) { i = i - tmpExpandable.getSubItems().size(); } } } } //we collapse our root element internalCollapse(expandable, position, notifyItemChanged); } } } } private void internalCollapse(int position, boolean notifyItemChanged) { Item item = getItem(position); if (item != null && item instanceof IExpandable) { IExpandable expandable = (IExpandable) item; //if this item is not already collapsed and has sub items we go on if (expandable.isExpanded() && expandable.getSubItems() != null && expandable.getSubItems().size() > 0) { internalCollapse(expandable, position, notifyItemChanged); } } } private void internalCollapse(IExpandable expandable, int position, boolean notifyItemChanged) { IAdapter adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { ((IItemAdapter) adapter).removeRange(position + 1, expandable.getSubItems().size()); } //remember that this item is now collapsed again expandable.withIsExpanded(false); //remove the information that this item was opened if (mPositionBasedStateManagement) { int 
indexOfKey = mExpanded.indexOfKey(position); if (indexOfKey >= 0) { mExpanded.removeAt(indexOfKey); } } //we need to notify to get the correct drawable if there is one showing the current state if (notifyItemChanged) { notifyItemChanged(position); } } /** * opens the expandable item at the given position * * @param position the global position */ public void expand(int position) { expand(position, false); } /** * opens the expandable item at the given position * * @param position the global position * @param notifyItemChanged true if we need to call notifyItemChanged. DEFAULT: false */ public void expand(int position, boolean notifyItemChanged) { Item item = getItem(position); if (item != null && item instanceof IExpandable) { IExpandable<?, Item> expandable = (IExpandable<?, Item>) item; if (mPositionBasedStateManagement) { //if this item is not already expanded and has sub items we go on if (mExpanded.indexOfKey(position) < 0 && expandable.getSubItems() != null && expandable.getSubItems().size() > 0) { IAdapter<Item> adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { ((IItemAdapter<Item>) adapter).add(position + 1, expandable.getSubItems()); } //remember that this item is now opened (not collapsed) expandable.withIsExpanded(true); //we need to notify to get the correct drawable if there is one showing the current state if (notifyItemChanged) { notifyItemChanged(position); } //store it in the list of opened expandable items mExpanded.put(position, expandable.getSubItems() != null ? 
expandable.getSubItems().size() : 0); } } else { //if this item is not already expanded and has sub items we go on if (!expandable.isExpanded() && expandable.getSubItems() != null && expandable.getSubItems().size() > 0) { IAdapter<Item> adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { ((IItemAdapter<Item>) adapter).add(position + 1, expandable.getSubItems()); } //remember that this item is now opened (not collapsed) expandable.withIsExpanded(true); //we need to notify to get the correct drawable if there is one showing the current state if (notifyItemChanged) { notifyItemChanged(position); } } } } } //------------------------- //------------------------- //wrap the notify* methods so we can have our required selection adjustment code //------------------------- //------------------------- /** * wraps notifyDataSetChanged */ public void notifyAdapterDataSetChanged() { if (mPositionBasedStateManagement) { mSelections.clear(); mExpanded.clear(); } cacheSizes(); notifyDataSetChanged(); if (mPositionBasedStateManagement) { //we make sure the new items are displayed properly AdapterUtil.handleStates(this, 0, getItemCount() - 1); } } /** * wraps notifyItemInserted * * @param position the global position */ public void notifyAdapterItemInserted(int position) { notifyAdapterItemRangeInserted(position, 1); } /** * wraps notifyItemRangeInserted * * @param position the global position * @param itemCount the count of items inserted */ public void notifyAdapterItemRangeInserted(int position, int itemCount) { //we have to update all current stored selection and expandable states in our map if (mPositionBasedStateManagement) { mSelections = AdapterUtil.adjustPosition(mSelections, position, Integer.MAX_VALUE, itemCount); mExpanded = AdapterUtil.adjustPosition(mExpanded, position, Integer.MAX_VALUE, itemCount); } cacheSizes(); notifyItemRangeInserted(position, itemCount); if (mPositionBasedStateManagement) { //we make sure the new items are 
displayed properly AdapterUtil.handleStates(this, position, position + itemCount - 1); } } /** * wraps notifyItemRemoved * * @param position the global position */ public void notifyAdapterItemRemoved(int position) { notifyAdapterItemRangeRemoved(position, 1); } /** * wraps notifyItemRangeRemoved * * @param position the global position * @param itemCount the count of items removed */ public void notifyAdapterItemRangeRemoved(int position, int itemCount) { //we have to update all current stored selection and expandable states in our map if (mPositionBasedStateManagement) { mSelections = AdapterUtil.adjustPosition(mSelections, position, Integer.MAX_VALUE, itemCount * (-1)); mExpanded = AdapterUtil.adjustPosition(mExpanded, position, Integer.MAX_VALUE, itemCount * (-1)); } cacheSizes(); notifyItemRangeRemoved(position, itemCount); } /** * wraps notifyItemMoved * * @param fromPosition the global fromPosition * @param toPosition the global toPosition */ public void notifyAdapterItemMoved(int fromPosition, int toPosition) { //collapse items we move. 
just in case :D collapse(fromPosition); collapse(toPosition); if (mPositionBasedStateManagement) { if (!mSelections.contains(fromPosition) && mSelections.contains(toPosition)) { mSelections.remove(toPosition); mSelections.add(fromPosition); } else if (mSelections.contains(fromPosition) && !mSelections.contains(toPosition)) { mSelections.remove(fromPosition); mSelections.add(toPosition); } } notifyItemMoved(fromPosition, toPosition); } /** * wraps notifyItemChanged * * @param position the global position */ public void notifyAdapterItemChanged(int position) { notifyAdapterItemChanged(position, null); } /** * wraps notifyItemChanged * * @param position the global position * @param payload additional payload */ public void notifyAdapterItemChanged(int position, Object payload) { notifyAdapterItemRangeChanged(position, 1, payload); } /** * wraps notifyItemRangeChanged * * @param position the global position * @param itemCount the count of items changed */ public void notifyAdapterItemRangeChanged(int position, int itemCount) { notifyAdapterItemRangeChanged(position, itemCount, null); } /** * wraps notifyItemRangeChanged * * @param position the global position * @param itemCount the count of items changed * @param payload an additional payload */ public void notifyAdapterItemRangeChanged(int position, int itemCount, Object payload) { for (int i = position; i < position + itemCount; i++) { if (mPositionBasedStateManagement) { if (mExpanded.indexOfKey(i) >= 0) { collapse(i); } } else { Item item = getItem(position); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { collapse(position); } } } if (payload == null) { notifyItemRangeChanged(position, itemCount); } else { notifyItemRangeChanged(position, itemCount, payload); } if (mPositionBasedStateManagement) { //we make sure the new items are displayed properly AdapterUtil.handleStates(this, position, position + itemCount - 1); } } /** * notifies the fastAdapter about new / removed items within a sub 
hierarchy * NOTE this currently only works for sub items with only 1 level * * @param position the global position of the parent item */ public void notifyAdapterSubItemsChanged(int position) { //TODO ALSO CARE ABOUT SUB SUB ... HIRACHIES if (mPositionBasedStateManagement) { //we only need to do something if this item is expanded if (mExpanded.indexOfKey(position) > -1) { int previousCount = mExpanded.get(position); int itemsCount = notifyAdapterSubItemsChanged(position, previousCount); mExpanded.put(position, itemsCount); } } else { Log.e("FastAdapter", "please use the notifyAdapterSubItemsChanged(int position, int previousCount) method instead in the PositionBasedStateManagement mode, as we are not able to calculate the previous count "); } } /** * notifies the fastAdapter about new / removed items within a sub hierarchy * NOTE this currently only works for sub items with only 1 level * * @param position the global position of the parent item * @param previousCount the previous count of sub items * @return the new count of subItems */ public int notifyAdapterSubItemsChanged(int position, int previousCount) { Item item = getItem(position); if (item != null && item instanceof IExpandable) { IExpandable expandable = (IExpandable) item; IAdapter adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { ((IItemAdapter) adapter).removeRange(position + 1, previousCount); ((IItemAdapter) adapter).add(position + 1, expandable.getSubItems()); } return expandable.getSubItems().size(); } return 0; } //listeners public interface OnTouchListener<Item extends IItem> { /** * the onTouch event of a specific item inside the RecyclerView * * @param v the view we clicked * @param event the touch event * @param adapter the adapter which is responsible for the given item * @param item the IItem which was clicked * @param position the global position * @return return true if the event was consumed, otherwise false */ boolean onTouch(View v, MotionEvent 
event, IAdapter<Item> adapter, Item item, int position); } public interface OnClickListener<Item extends IItem> { /** * the onClick event of a specific item inside the RecyclerView * * @param v the view we clicked * @param adapter the adapter which is responsible for the given item * @param item the IItem which was clicked * @param position the global position * @return return true if the event was consumed, otherwise false */ boolean onClick(View v, IAdapter<Item> adapter, Item item, int position); } public interface OnLongClickListener<Item extends IItem> { /** * the onLongClick event of a specific item inside the RecyclerView * * @param v the view we clicked * @param adapter the adapter which is responsible for the given item * @param item the IItem which was clicked * @param position the global position * @return return true if the event was consumed, otherwise false */ boolean onLongClick(View v, IAdapter<Item> adapter, Item item, int position); } public interface OnCreateViewHolderListener { /** * is called inside the onCreateViewHolder method and creates the viewHolder based on the provided viewTyp * * @param parent the parent which will host the View * @param viewType the type of the ViewHolder we want to create * @return the generated ViewHolder based on the given viewType */ RecyclerView.ViewHolder onPreCreateViewHolder(ViewGroup parent, int viewType); /** * is called after the viewHolder was created and the default listeners were added * * @param viewHolder the created viewHolder after all listeners were set * @return the viewHolder given as param */ RecyclerView.ViewHolder onPostCreateViewHolder(RecyclerView.ViewHolder viewHolder); } /** * default implementation of the OnCreateViewHolderListener */ public class OnCreateViewHolderListenerImpl implements OnCreateViewHolderListener { /** * is called inside the onCreateViewHolder method and creates the viewHolder based on the provided viewTyp * * @param parent the parent which will host the View * @param 
viewType the type of the ViewHolder we want to create * @return the generated ViewHolder based on the given viewType */ @Override public RecyclerView.ViewHolder onPreCreateViewHolder(ViewGroup parent, int viewType) { return getTypeInstance(viewType).getViewHolder(parent); } /** * is called after the viewHolder was created and the default listeners were added * * @param viewHolder the created viewHolder after all listeners were set * @return the viewHolder given as param */ @Override public RecyclerView.ViewHolder onPostCreateViewHolder(RecyclerView.ViewHolder viewHolder) { return viewHolder; } } public interface OnBindViewHolderListener { /** * is called in onBindViewHolder to bind the data on the ViewHolder * * @param viewHolder the viewHolder for the type at this position * @param position the position of thsi viewHolder */ void onBindViewHolder(RecyclerView.ViewHolder viewHolder, int position); } public class OnBindViewHolderListenerImpl implements OnBindViewHolderListener { /** * is called in onBindViewHolder to bind the data on the ViewHolder * * @param viewHolder the viewHolder for the type at this position * @param position the position of this viewHolder */ @Override public void onBindViewHolder(RecyclerView.ViewHolder viewHolder, int position) { getItem(position).bindView(viewHolder); } } /** * an internal class to return the IItem and relativePosition and it's adapter at once. used to save one iteration inside the getInternalItem method */ public static class RelativeInfo<Item extends IItem> { public IAdapter<Item> adapter = null; public Item item = null; public int position = -1; } }
library/src/main/java/com/mikepenz/fastadapter/FastAdapter.java
package com.mikepenz.fastadapter;

import android.os.Bundle;
import android.support.v4.util.ArrayMap;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.util.SparseIntArray;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;

import com.mikepenz.fastadapter.utils.AdapterUtil;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

/**
 * The core adapter which chains multiple {@link IAdapter}s together, tracks selection and
 * expansion state, and dispatches click / long-click / touch events to registered listeners.
 * <p>
 * Created by mikepenz on 27.12.15.
 */
public class FastAdapter<Item extends IItem> extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
    // Bundle keys used to persist selection / expansion state across configuration changes
    protected static final String BUNDLE_SELECTIONS = "bundle_selections";
    protected static final String BUNDLE_EXPANDED = "bundle_expanded";

    // we remember all adapters, keyed by their order
    //priority queue...
    final private ArrayMap<Integer, IAdapter<Item>> mAdapters = new ArrayMap<>();
    // we remember all possible types so we can create a new view efficiently
    final private ArrayMap<Integer, Item> mTypeInstances = new ArrayMap<>();
    // cache the sizes of the different adapters so we can access the items more performant
    // maps the global start position of each adapter -> adapter (floorEntry lookups)
    final private NavigableMap<Integer, IAdapter<Item>> mAdapterSizes = new TreeMap<>();
    // the total size over all registered adapters
    private int mGlobalSize = 0;

    // if enabled we will select the item via a notifyItemChanged -> will animate with the Animator
    // you can also use this if you have any custom logic for selections, and do not depend on the "selected" state of the view
    // note if enabled it will feel a bit slower because it will animate the selection
    private boolean mSelectWithItemUpdate = false;
    // if we want multiSelect enabled
    private boolean mMultiSelect = false;
    // if we want the multiSelect only on longClick
    private boolean mSelectOnLongClick = false;
    // if a user can deselect a selection via click. required if there is always one selected item!
    private boolean mAllowDeselection = true;
    // if items are selectable in general
    private boolean mSelectable = false;
    // only one expanded section
    private boolean mOnlyOneExpandedItem = false;
    // if we use the positionBasedStateManagement or the "stateless" management
    private boolean mPositionBasedStateManagement = false;

    // we need to remember all selections to recreate them after orientation change
    // (only maintained in positionBasedStateManagement mode)
    private SortedSet<Integer> mSelections = new TreeSet<>();
    // we need to remember all expanded items to recreate them after orientation change
    // (only maintained in positionBasedStateManagement mode)
    private SparseIntArray mExpanded = new SparseIntArray();

    // the listeners which can be hooked on an item
    private OnClickListener<Item> mOnPreClickListener;
    private OnClickListener<Item> mOnClickListener;
    private OnLongClickListener<Item> mOnPreLongClickListener;
    private OnLongClickListener<Item> mOnLongClickListener;
    private OnTouchListener<Item> mOnTouchListener;

    //the listeners for onCreateViewHolder or onBindViewHolder
    private OnCreateViewHolderListener mOnCreateViewHolderListener = new OnCreateViewHolderListenerImpl();
    private OnBindViewHolderListener mOnBindViewHolderListener = new OnBindViewHolderListenerImpl();

    /**
     * default CTOR. Stable ids are enabled; items must provide a unique identifier.
     */
    public FastAdapter() {
        setHasStableIds(true);
    }

    /**
     * Define the OnClickListener which will be used for a single item
     *
     * @param onClickListener the OnClickListener which will be used for a single item
     * @return this
     */
    public FastAdapter<Item> withOnClickListener(OnClickListener<Item> onClickListener) {
        this.mOnClickListener = onClickListener;
        return this;
    }

    /**
     * Define the OnPreClickListener which will be used for a single item and is called before all internal methods are done
     *
     * @param onPreClickListener the OnPreClickListener which will be called before a single item was clicked and all internal methods are done
     * @return this
     */
    public FastAdapter<Item> withOnPreClickListener(OnClickListener<Item> onPreClickListener) {
        this.mOnPreClickListener
                = onPreClickListener;
        return this;
    }

    /**
     * Define the OnLongClickListener which will be used for a single item
     *
     * @param onLongClickListener the OnLongClickListener which will be used for a single item
     * @return this
     */
    public FastAdapter<Item> withOnLongClickListener(OnLongClickListener<Item> onLongClickListener) {
        this.mOnLongClickListener = onLongClickListener;
        return this;
    }

    /**
     * Define the OnPreLongClickListener which will be used for a single item and is called before all internal methods are done
     *
     * @param onPreLongClickListener the OnLongClickListener which will be called after a single item was clicked and all internal methods are done
     * @return this
     */
    public FastAdapter<Item> withOnPreLongClickListener(OnLongClickListener<Item> onPreLongClickListener) {
        this.mOnPreLongClickListener = onPreLongClickListener;
        return this;
    }

    /**
     * Define the TouchListener which will be used for a single item
     *
     * @param onTouchListener the TouchListener which will be used for a single item
     * @return this
     */
    public FastAdapter<Item> withOnTouchListener(OnTouchListener<Item> onTouchListener) {
        this.mOnTouchListener = onTouchListener;
        return this;
    }

    /**
     * allows you to set a custom OnCreateViewHolderListener which will be used before and after the ViewHolder is created
     * You may check the OnCreateViewHolderListenerImpl for the default behavior
     *
     * @param onCreateViewHolderListener the OnCreateViewHolderListener (you may use the OnCreateViewHolderListenerImpl)
     * @return this
     */
    public FastAdapter<Item> withOnCreateViewHolderListener(OnCreateViewHolderListener onCreateViewHolderListener) {
        this.mOnCreateViewHolderListener = onCreateViewHolderListener;
        return this;
    }

    /**
     * allows you to set a custom OnBindViewHolderListener which is used to bind the view. This will overwrite the library's default behavior.
     * You may check the OnBindViewHolderListenerImpl for the default behavior
     *
     * @param onBindViewHolderListener the OnBindViewHolderListener
     * @return this
     */
    public FastAdapter<Item> withOnBindViewHolderListener(OnBindViewHolderListener onBindViewHolderListener) {
        this.mOnBindViewHolderListener = onBindViewHolderListener;
        return this;
    }

    /**
     * select between the different selection behaviors.
     * there are now 2 different variants of selection. you can toggle this via `withSelectWithItemUpdate(boolean)` (where false == default - variant 1)
     * 1.) direct selection via the view "selected" state, we also make sure we do not animate here so no notifyItemChanged is called if we repeatedly press the same item
     * 2.) we select the items via a notifyItemChanged. this will allow custom selected logics within your views (isSelected() - do something...) and it will also animate the change via the provided itemAnimator. because of the animation of the itemAnimator the selection will have a small delay (time of animating)
     *
     * @param selectWithItemUpdate true if notifyItemChanged should be called upon select
     * @return this
     */
    public FastAdapter<Item> withSelectWithItemUpdate(boolean selectWithItemUpdate) {
        this.mSelectWithItemUpdate = selectWithItemUpdate;
        return this;
    }

    /**
     * Enable this if you want multiSelection possible in the list
     *
     * @param multiSelect true to enable multiSelect
     * @return this
     */
    public FastAdapter<Item> withMultiSelect(boolean multiSelect) {
        mMultiSelect = multiSelect;
        return this;
    }

    /**
     * Disable this if you want the selection on a single tap
     *
     * @param selectOnLongClick false to do select via single click
     * @return this
     */
    public FastAdapter<Item> withSelectOnLongClick(boolean selectOnLongClick) {
        mSelectOnLongClick = selectOnLongClick;
        return this;
    }

    /**
     * If false, a user can't deselect an item via click (you can still do this programmatically)
     *
     * @param allowDeselection true if a user can deselect an already selected item via click
     * @return this
     */
    public
    FastAdapter<Item> withAllowDeselection(boolean allowDeselection) {
        this.mAllowDeselection = allowDeselection;
        return this;
    }

    /**
     * set if items are selectable in general
     *
     * @param selectable true if items are selectable
     * @return this
     */
    public FastAdapter<Item> withSelectable(boolean selectable) {
        this.mSelectable = selectable;
        return this;
    }

    /**
     * set if we want to use the positionBasedStateManagement (high performant for lists up to Integer.MAX_INT)
     * set to false if you want to use the new stateManagement which will come with more flexibility (but worse performance on long lists)
     *
     * @param mPositionBasedStateManagement false to enable the alternative "stateLess" stateManagement
     * @return this
     */
    public FastAdapter<Item> withPositionBasedStateManagement(boolean mPositionBasedStateManagement) {
        this.mPositionBasedStateManagement = mPositionBasedStateManagement;
        return this;
    }

    /**
     * @return if items are selectable
     */
    public boolean isSelectable() {
        return mSelectable;
    }

    /**
     * @return if this FastAdapter is configured with the PositionBasedStateManagement
     */
    public boolean isPositionBasedStateManagement() {
        return mPositionBasedStateManagement;
    }

    /**
     * set if there should only be one opened expandable item
     * DEFAULT: false
     *
     * @param mOnlyOneExpandedItem true if there should be only one expanded, expandable item in the list
     * @return this
     */
    public FastAdapter<Item> withOnlyOneExpandedItem(boolean mOnlyOneExpandedItem) {
        this.mOnlyOneExpandedItem = mOnlyOneExpandedItem;
        return this;
    }

    /**
     * @return if there should be only one expanded, expandable item in the list
     */
    public boolean isOnlyOneExpandedItem() {
        return mOnlyOneExpandedItem;
    }

    /**
     * re-selects all elements stored in the savedInstanceState
     * IMPORTANT! Call this method only after all items were added to the adapters again. Otherwise it may select wrong items!
* * @param savedInstanceState If the activity is being re-initialized after * previously being shut down then this Bundle contains the data it most * recently supplied in Note: Otherwise it is null. * @return this */ public FastAdapter<Item> withSavedInstanceState(Bundle savedInstanceState) { return withSavedInstanceState(savedInstanceState, ""); } /** * re-selects all elements stored in the savedInstanceState * IMPORTANT! Call this method only after all items where added to the adapters again. Otherwise it may select wrong items! * * @param savedInstanceState If the activity is being re-initialized after * previously being shut down then this Bundle contains the data it most * recently supplied in Note: Otherwise it is null. * @param prefix a prefix added to the savedInstance key so we can store multiple states * @return this */ public FastAdapter<Item> withSavedInstanceState(Bundle savedInstanceState, String prefix) { if (savedInstanceState != null) { //make sure already done selections are removed deselect(); if (mPositionBasedStateManagement) { //first restore opened collasable items, as otherwise may not all selections could be restored int[] expandedItems = savedInstanceState.getIntArray(BUNDLE_EXPANDED + prefix); if (expandedItems != null) { for (Integer expandedItem : expandedItems) { expand(expandedItem); } } //restore the selections int[] selections = savedInstanceState.getIntArray(BUNDLE_SELECTIONS + prefix); if (selections != null) { for (Integer selection : selections) { select(selection); } } } else { ArrayList<String> expandedItems = savedInstanceState.getStringArrayList(BUNDLE_EXPANDED + prefix); ArrayList<String> selectedItems = savedInstanceState.getStringArrayList(BUNDLE_SELECTIONS + prefix); for (int i = 0; i < getItemCount(); i++) { Item item = getItem(i); String id = String.valueOf(item.getIdentifier()); if (expandedItems != null && expandedItems.contains(id)) { expand(i); } if (selectedItems != null && selectedItems.contains(id)) { select(i); 
} //we also have to restore the selections for subItems AdapterUtil.restoreSubItemSelectionStatesForAlternativeStateManagement(item, selectedItems); } } } return this; } /** * registers an AbstractAdapter which will be hooked into the adapter chain * * @param adapter an adapter which extends the AbstractAdapter */ public <A extends AbstractAdapter<Item>> void registerAdapter(A adapter) { if (!mAdapters.containsKey(adapter.getOrder())) { mAdapters.put(adapter.getOrder(), adapter); cacheSizes(); } } /** * register a new type into the TypeInstances to be able to efficiently create thew ViewHolders * * @param item an IItem which will be shown in the list */ public void registerTypeInstance(Item item) { if (!mTypeInstances.containsKey(item.getType())) { mTypeInstances.put(item.getType(), item); } } /** * gets the TypeInstance remembered within the FastAdapter for an item * * @param type the int type of the item * @return the Item typeInstance */ public Item getTypeInstance(int type) { return mTypeInstances.get(type); } /** * Creates the ViewHolder by the viewType * * @param parent the parent view (the RecyclerView) * @param viewType the current viewType which is bound * @return the ViewHolder with the bound data */ @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { final RecyclerView.ViewHolder holder = mOnCreateViewHolderListener.onPreCreateViewHolder(parent, viewType); //handle click behavior holder.itemView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { int pos = holder.getAdapterPosition(); if (pos != RecyclerView.NO_POSITION) { boolean consumed = false; RelativeInfo<Item> relativeInfo = getRelativeInfo(pos); Item item = relativeInfo.item; if (item != null && item.isEnabled()) { //on the very first we call the click listener from the item itself (if defined) if (item instanceof IClickable && ((IClickable) item).getOnPreItemClickListener() != null) { consumed = ((IClickable<Item>) 
item).getOnPreItemClickListener().onClick(v, relativeInfo.adapter, item, pos); } //first call the onPreClickListener which would allow to prevent executing of any following code, including selection if (!consumed && mOnPreClickListener != null) { consumed = mOnPreClickListener.onClick(v, relativeInfo.adapter, item, pos); } //if this is a expandable item :D if (!consumed && item instanceof IExpandable) { if (((IExpandable) item).getSubItems() != null) { toggleExpandable(pos); } } //if there should be only one expanded item we want to collapse all the others but the current one if (mOnlyOneExpandedItem) { int[] expandedItems = getExpandedItems(); for (int i = expandedItems.length - 1; i >= 0; i--) { if (expandedItems[i] != pos) { collapse(expandedItems[i], true); } } } //handle the selection if the event was not yet consumed, and we are allowed to select an item (only occurs when we select with long click only) if (!consumed && !mSelectOnLongClick && mSelectable) { handleSelection(v, item, pos); } //before calling the global adapter onClick listener call the item specific onClickListener if (item instanceof IClickable && ((IClickable) item).getOnItemClickListener() != null) { consumed = ((IClickable<Item>) item).getOnItemClickListener().onClick(v, relativeInfo.adapter, item, pos); } //call the normal click listener after selection was handlded if (!consumed && mOnClickListener != null) { mOnClickListener.onClick(v, relativeInfo.adapter, item, pos); } } } } }); //handle long click behavior holder.itemView.setOnLongClickListener(new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { int pos = holder.getAdapterPosition(); if (pos != RecyclerView.NO_POSITION) { boolean consumed = false; RelativeInfo<Item> relativeInfo = getRelativeInfo(pos); if (relativeInfo.item != null && relativeInfo.item.isEnabled()) { //first call the OnPreLongClickListener which would allow to prevent executing of any following code, including selection if 
(mOnPreLongClickListener != null) { consumed = mOnPreLongClickListener.onLongClick(v, relativeInfo.adapter, relativeInfo.item, pos); } //now handle the selection if we are in multiSelect mode and allow selecting on longClick if (!consumed && mSelectOnLongClick && mSelectable) { handleSelection(v, relativeInfo.item, pos); } //call the normal long click listener after selection was handled if (mOnLongClickListener != null) { consumed = mOnLongClickListener.onLongClick(v, relativeInfo.adapter, relativeInfo.item, pos); } } return consumed; } return false; } }); //handle touch behavior holder.itemView.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { if (mOnTouchListener != null) { int pos = holder.getAdapterPosition(); if (pos != RecyclerView.NO_POSITION) { RelativeInfo<Item> relativeInfo = getRelativeInfo(pos); return mOnTouchListener.onTouch(v, event, relativeInfo.adapter, relativeInfo.item, pos); } } return false; } }); return mOnCreateViewHolderListener.onPostCreateViewHolder(holder); } /** * Binds the data to the created ViewHolder and sets the listeners to the holder.itemView * * @param holder the viewHolder we bind the data on * @param position the global position */ @Override public void onBindViewHolder(final RecyclerView.ViewHolder holder, int position) { mOnBindViewHolderListener.onBindViewHolder(holder, position); } /** * Searches for the given item and calculates it's global position * * @param item the item which is searched for * @return the global position, or -1 if not found */ public int getPosition(Item item) { if (item.getIdentifier() == -1) { Log.e("FastAdapter", "You have to define an identifier for your item to retrieve the position via this method"); return -1; } int position = 0; int length = mAdapters.size(); for (int i = 0; i < length; i++) { IAdapter<Item> adapter = mAdapters.valueAt(i); if (adapter.getOrder() < 0) { continue; } int relativePosition = 
adapter.getAdapterPosition(item); if (relativePosition != -1) { return position + relativePosition; } position = adapter.getAdapterItemCount(); } return -1; } /** * gets the IItem by a position, from all registered adapters * * @param position the global position * @return the found IItem or null */ public Item getItem(int position) { //if we are out of range just return null if (position < 0 || position >= mGlobalSize) { return null; } //now get the adapter which is responsible for the given position Map.Entry<Integer, IAdapter<Item>> entry = mAdapterSizes.floorEntry(position); return entry.getValue().getAdapterItem(position - entry.getKey()); } /** * Internal method to get the Item as ItemHolder which comes with the relative position within it's adapter * Finds the responsible adapter for the given position * * @param position the global position * @return the adapter which is responsible for this position */ public RelativeInfo<Item> getRelativeInfo(int position) { if (position < 0) { return new RelativeInfo<>(); } RelativeInfo<Item> relativeInfo = new RelativeInfo<>(); Map.Entry<Integer, IAdapter<Item>> entry = mAdapterSizes.floorEntry(position); if (entry != null) { relativeInfo.item = entry.getValue().getAdapterItem(position - entry.getKey()); relativeInfo.adapter = entry.getValue(); relativeInfo.position = position; } return relativeInfo; } /** * Gets the adapter for the given position * * @param position the global position * @return the adapter responsible for this global position */ public IAdapter<Item> getAdapter(int position) { //if we are out of range just return null if (position < 0 || position >= mGlobalSize) { return null; } //now get the adapter which is responsible for the given position return mAdapterSizes.floorEntry(position).getValue(); } /** * finds the int ItemViewType from the IItem which exists at the given position * * @param position the global position * @return the viewType for this position */ @Override public int 
getItemViewType(int position) { return getItem(position).getType(); } /** * finds the int ItemId from the IItem which exists at the given position * * @param position the global position * @return the itemId for this position */ @Override public long getItemId(int position) { return getItem(position).getIdentifier(); } /** * calculates the total ItemCount over all registered adapters * * @return the global count */ public int getItemCount() { return mGlobalSize; } /** * calculates the item count up to a given (excluding this) order number * * @param order the number up to which the items are counted * @return the total count of items up to the adapter order */ public int getPreItemCountByOrder(int order) { //if we are empty just return 0 count if (mGlobalSize == 0) { return 0; } int size = 0; //count the number of items before the adapter with the given order for (IAdapter<Item> adapter : mAdapters.values()) { if (adapter.getOrder() == order) { return size; } else { size = size + adapter.getAdapterItemCount(); } } //get the count of items which are before this order return size; } /** * calculates the item count up to a given (excluding this) adapter (defined by the global position of the item) * * @param position the global position of an adapter item * @return the total count of items up to the adapter which holds the given position */ public int getPreItemCount(int position) { //if we are empty just return 0 count if (mGlobalSize == 0) { return 0; } //get the count of items which are before this order return mAdapterSizes.floorKey(position); } /** * calculates the count of expandable items before a given position * * @param from the global start position you should pass here the count of items of the previous adapters (or 0 if you want to start from the beginning) * @param position the global position * @return the count of expandable items before a given position */ public int getExpandedItemsCount(int from, int position) { int totalAddedItems = 0; if 
(mPositionBasedStateManagement) { int length = mExpanded.size(); for (int i = 0; i < length; i++) { //now we count the amount of expanded items within our range we check if (mExpanded.keyAt(i) >= from && mExpanded.keyAt(i) < position) { totalAddedItems = totalAddedItems + mExpanded.get(mExpanded.keyAt(i)); } else if (mExpanded.keyAt(i) >= position) { //we do not care about all expanded items which are outside our range break; } } } else { //first we find out how many items were added in total //also counting subItems for (int i = position; i < position + totalAddedItems; i++) { Item tmp = getItem(i); if (tmp instanceof IExpandable) { IExpandable tmpExpandable = ((IExpandable) tmp); if (tmpExpandable.getSubItems() != null && tmpExpandable.isExpanded()) { totalAddedItems = totalAddedItems + tmpExpandable.getSubItems().size(); } } } } return totalAddedItems; } /** * add the values to the bundle for saveInstanceState * * @param savedInstanceState If the activity is being re-initialized after * previously being shut down then this Bundle contains the data it most * recently supplied in Note: Otherwise it is null. * @return the passed bundle with the newly added data */ public Bundle saveInstanceState(Bundle savedInstanceState) { return saveInstanceState(savedInstanceState, ""); } /** * add the values to the bundle for saveInstanceState * * @param savedInstanceState If the activity is being re-initialized after * previously being shut down then this Bundle contains the data it most * recently supplied in Note: Otherwise it is null. 
* @param prefix a prefix added to the savedInstance key so we can store multiple states * @return the passed bundle with the newly added data */ public Bundle saveInstanceState(Bundle savedInstanceState, String prefix) { if (savedInstanceState != null) { if (mPositionBasedStateManagement) { //remember the selections int[] selections = new int[mSelections.size()]; int index = 0; for (Integer selection : mSelections) { selections[index] = selection; index++; } savedInstanceState.putIntArray(BUNDLE_SELECTIONS + prefix, selections); //remember the collapsed states savedInstanceState.putIntArray(BUNDLE_EXPANDED + prefix, getExpandedItems()); } else { ArrayList<String> selections = new ArrayList<>(); ArrayList<String> expandedItems = new ArrayList<>(); int length = getItemCount(); for (int i = 0; i < length; i++) { Item item = getItem(i); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { expandedItems.add(String.valueOf(item.getIdentifier())); } if (item.isSelected()) { selections.add(String.valueOf(item.getIdentifier())); } //we also have to find all selections in the sub hirachies AdapterUtil.findSubItemSelections(item, selections); } //remember the selections savedInstanceState.putStringArrayList(BUNDLE_SELECTIONS + prefix, selections); //remember the collapsed states savedInstanceState.putStringArrayList(BUNDLE_EXPANDED + prefix, expandedItems); } } return savedInstanceState; } /** * we cache the sizes of our adapters so get accesses are faster */ private void cacheSizes() { mAdapterSizes.clear(); int size = 0; //we also have to add this for the first adapter otherwise the floorKey method will return the wrong value if (mAdapters.size() > 0) { mAdapterSizes.put(0, mAdapters.valueAt(0)); } for (IAdapter<Item> adapter : mAdapters.values()) { if (adapter.getAdapterItemCount() > 0) { mAdapterSizes.put(size, adapter); size = size + adapter.getAdapterItemCount(); } } mGlobalSize = size; } //------------------------- //------------------------- 
//Selection stuff //------------------------- //------------------------- /** * @return a set with the global positions of all selected items */ public Set<Integer> getSelections() { if (mPositionBasedStateManagement) { return mSelections; } else { Set<Integer> selections = new HashSet<>(); int length = getItemCount(); for (int i = 0; i < length; i++) { Item item = getItem(i); if (item.isSelected()) { selections.add(i); } } return selections; } } /** * @return a set with the items which are currently selected */ public Set<Item> getSelectedItems() { Set<Item> items = new HashSet<>(); for (Integer position : getSelections()) { items.add(getItem(position)); } return items; } /** * toggles the selection of the item at the given position * * @param position the global position */ public void toggleSelection(int position) { if (mPositionBasedStateManagement) { if (mSelections.contains(position)) { deselect(position); } else { select(position); } } else { if (getItem(position).isSelected()) { deselect(position); } else { select(position); } } } /** * handles the selection and deselects item if multiSelect is disabled * * @param position the global position */ private void handleSelection(View view, Item item, int position) { //if this item is not selectable don't continue if (!item.isSelectable()) { return; } //if we have disabled deselection via click don't continue if (item.isSelected() && !mAllowDeselection) { return; } boolean selected = mSelections.contains(position); if (mSelectWithItemUpdate || view == null) { if (!mMultiSelect) { deselect(); } if (selected) { deselect(position); } else { select(position); } } else { if (!mMultiSelect) { //we have to separately handle deselection here because if we toggle the current item we do not want to deselect this first! if (mPositionBasedStateManagement) { Iterator<Integer> entries = mSelections.iterator(); while (entries.hasNext()) { //deselect all but the current one! this is important! 
Integer pos = entries.next(); if (pos != position) { deselect(pos, entries); } } } else { Set<Integer> selections = getSelections(); for (int pos : selections) { if (pos != position) { deselect(pos); } } } } //we toggle the state of the view item.withSetSelected(!selected); view.setSelected(!selected); //now we make sure we remember the selection! if (mPositionBasedStateManagement) { if (selected) { if (mSelections.contains(position)) { mSelections.remove(position); } } else { mSelections.add(position); } } } } /** * selects all items at the positions in the iteratable * * @param positions the global positions to select */ public void select(Iterable<Integer> positions) { for (Integer position : positions) { select(position); } } /** * selects an item and remembers it's position in the selections list * * @param position the global position */ public void select(int position) { select(position, false); } /** * selects an item and remembers it's position in the selections list * * @param position the global position * @param fireEvent true if the onClick listener should be called */ public void select(int position, boolean fireEvent) { Item item = getItem(position); if (item != null) { item.withSetSelected(true); if (mPositionBasedStateManagement) { mSelections.add(position); } } notifyItemChanged(position); if (mOnClickListener != null && fireEvent) { mOnClickListener.onClick(null, getAdapter(position), item, position); } } /** * deselects all selections */ public void deselect() { if (mPositionBasedStateManagement) { deselect(mSelections); } else { deselect(getSelections()); } } /** * deselects all items at the positions in the iteratable * * @param positions the global positions to deselect */ public void deselect(Iterable<Integer> positions) { Iterator<Integer> entries = positions.iterator(); while (entries.hasNext()) { deselect(entries.next(), entries); } } /** * deselects an item and removes it's position in the selections list * * @param position the global 
position */ public void deselect(int position) { deselect(position, null); } /** * deselects an item and removes it's position in the selections list * also takes an iterator to remove items from the map * * @param position the global position * @param entries the iterator which is used to deselect all */ private void deselect(int position, Iterator<Integer> entries) { Item item = getItem(position); if (item != null) { item.withSetSelected(false); } if (entries == null) { if (mPositionBasedStateManagement) { if (mSelections.contains(position)) { mSelections.remove(position); } } } else { entries.remove(); } notifyItemChanged(position); } /** * deletes all current selected items * * @return a list of the IItem elements which were deleted */ public List<Item> deleteAllSelectedItems() { List<Item> deletedItems = new LinkedList<>(); //we have to re-fetch the selections array again and again as the position will change after one item is deleted if (mPositionBasedStateManagement) { Set<Integer> selections = getSelections(); while (selections.size() > 0) { Iterator<Integer> iterator = selections.iterator(); int position = iterator.next(); IAdapter adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { deletedItems.add(getItem(position)); ((IItemAdapter) adapter).remove(position); } else { iterator.remove(); } selections = getSelections(); } } else { int length = getItemCount(); for (int i = length - 1; i >= 0; i--) { RelativeInfo<Item> ri = getRelativeInfo(i); if (ri.item.isSelected()) { if (ri.adapter != null && ri.adapter instanceof IItemAdapter) { ((IItemAdapter) ri.adapter).remove(i); } } } } return deletedItems; } //------------------------- //------------------------- //Expandable stuff //------------------------- //------------------------- /** * returns the expanded items this contains position and the count of items * which are expanded by this position * * @return the expanded items */ public SparseIntArray getExpanded() { if 
(mPositionBasedStateManagement) { return mExpanded; } else { SparseIntArray expandedItems = new SparseIntArray(); int length = getItemCount(); for (int i = 0; i < length; i++) { Item item = getItem(i); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { expandedItems.put(i, ((IExpandable) item).getSubItems().size()); } } return expandedItems; } } /** * @return a set with the global positions of all expanded items */ public int[] getExpandedItems() { int[] expandedItems; if (mPositionBasedStateManagement) { int length = mExpanded.size(); expandedItems = new int[length]; for (int i = 0; i < length; i++) { expandedItems[i] = mExpanded.keyAt(i); } } else { ArrayList<Integer> expandedItemsList = new ArrayList<>(); int length = getItemCount(); for (int i = 0; i < length; i++) { Item item = getItem(i); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { expandedItemsList.add(i); } } int expandedItemsListLength = expandedItemsList.size(); expandedItems = new int[expandedItemsListLength]; for (int i = 0; i < expandedItemsListLength; i++) { expandedItems[i] = expandedItemsList.get(i); } } return expandedItems; } /** * toggles the expanded state of the given expandable item at the given position * * @param position the global position */ public void toggleExpandable(int position) { if (mPositionBasedStateManagement) { if (mExpanded.indexOfKey(position) >= 0) { collapse(position); } else { expand(position); } } else { Item item = getItem(position); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { collapse(position); } else { expand(position); } } } /** * collapses all expanded items */ public void collapse() { collapse(true); } /** * collapses all expanded items * * @param notifyItemChanged true if we need to call notifyItemChanged. 
DEFAULT: false */ public void collapse(boolean notifyItemChanged) { int[] expandedItems = getExpandedItems(); for (int i = expandedItems.length - 1; i >= 0; i--) { collapse(expandedItems[i], notifyItemChanged); } } /** * collapses (closes) the given collapsible item at the given position * * @param position the global position */ public void collapse(int position) { collapse(position, false); } /** * collapses (closes) the given collapsible item at the given position * * @param position the global position * @param notifyItemChanged true if we need to call notifyItemChanged. DEFAULT: false */ public void collapse(int position, boolean notifyItemChanged) { Item item = getItem(position); if (item != null && item instanceof IExpandable) { IExpandable expandable = (IExpandable) item; //as we now know the item we will collapse we can collapse all subitems //if this item is not already collapsed and has sub items we go on if (expandable.isExpanded() && expandable.getSubItems() != null && expandable.getSubItems().size() > 0) { if (mPositionBasedStateManagement) { //first we find out how many items were added in total int totalAddedItems = expandable.getSubItems().size(); int length = mExpanded.size(); for (int i = 0; i < length; i++) { if (mExpanded.keyAt(i) > position && mExpanded.keyAt(i) <= position + totalAddedItems) { totalAddedItems = totalAddedItems + mExpanded.get(mExpanded.keyAt(i)); } } //we will deselect starting with the lowest one Iterator<Integer> selectionsIterator = mSelections.iterator(); while (selectionsIterator.hasNext()) { Integer value = selectionsIterator.next(); if (value > position && value <= position + totalAddedItems) { deselect(value, selectionsIterator); } } //now we start to collapse them for (int i = length - 1; i >= 0; i--) { if (mExpanded.keyAt(i) > position && mExpanded.keyAt(i) <= position + totalAddedItems) { //we collapsed those items now we remove update the added items totalAddedItems = totalAddedItems - 
mExpanded.get(mExpanded.keyAt(i)); //we collapse the item internalCollapse(mExpanded.keyAt(i), notifyItemChanged); } } //we collapse our root element internalCollapse(expandable, position, notifyItemChanged); } else { //first we find out how many items were added in total //also counting subitems int totalAddedItems = expandable.getSubItems().size(); for (int i = position + 1; i < position + totalAddedItems; i++) { Item tmp = getItem(i); if (tmp instanceof IExpandable) { IExpandable tmpExpandable = ((IExpandable) tmp); if (tmpExpandable.getSubItems() != null && tmpExpandable.isExpanded()) { totalAddedItems = totalAddedItems + tmpExpandable.getSubItems().size(); } } } //why... WHY?! for (int i = position + totalAddedItems - 1; i > position; i--) { Item tmp = getItem(i); if (tmp instanceof IExpandable) { IExpandable tmpExpandable = ((IExpandable) tmp); if (tmpExpandable.isExpanded()) { collapse(i); if (tmpExpandable.getSubItems() != null) { i = i - tmpExpandable.getSubItems().size(); } } } } //we collapse our root element internalCollapse(expandable, position, notifyItemChanged); } } } } private void internalCollapse(int position, boolean notifyItemChanged) { Item item = getItem(position); if (item != null && item instanceof IExpandable) { IExpandable expandable = (IExpandable) item; //if this item is not already collapsed and has sub items we go on if (expandable.isExpanded() && expandable.getSubItems() != null && expandable.getSubItems().size() > 0) { internalCollapse(expandable, position, notifyItemChanged); } } } private void internalCollapse(IExpandable expandable, int position, boolean notifyItemChanged) { IAdapter adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { ((IItemAdapter) adapter).removeRange(position + 1, expandable.getSubItems().size()); } //remember that this item is now collapsed again expandable.withIsExpanded(false); //remove the information that this item was opened if (mPositionBasedStateManagement) { int 
indexOfKey = mExpanded.indexOfKey(position); if (indexOfKey >= 0) { mExpanded.removeAt(indexOfKey); } } //we need to notify to get the correct drawable if there is one showing the current state if (notifyItemChanged) { notifyItemChanged(position); } } /** * opens the expandable item at the given position * * @param position the global position */ public void expand(int position) { expand(position, false); } /** * opens the expandable item at the given position * * @param position the global position * @param notifyItemChanged true if we need to call notifyItemChanged. DEFAULT: false */ public void expand(int position, boolean notifyItemChanged) { Item item = getItem(position); if (item != null && item instanceof IExpandable) { IExpandable<?, Item> expandable = (IExpandable<?, Item>) item; if (mPositionBasedStateManagement) { //if this item is not already expanded and has sub items we go on if (mExpanded.indexOfKey(position) < 0 && expandable.getSubItems() != null && expandable.getSubItems().size() > 0) { IAdapter<Item> adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { ((IItemAdapter<Item>) adapter).add(position + 1, expandable.getSubItems()); } //remember that this item is now opened (not collapsed) expandable.withIsExpanded(true); //we need to notify to get the correct drawable if there is one showing the current state if (notifyItemChanged) { notifyItemChanged(position); } //store it in the list of opened expandable items mExpanded.put(position, expandable.getSubItems() != null ? 
expandable.getSubItems().size() : 0); } } else { //if this item is not already expanded and has sub items we go on if (!expandable.isExpanded() && expandable.getSubItems() != null && expandable.getSubItems().size() > 0) { IAdapter<Item> adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { ((IItemAdapter<Item>) adapter).add(position + 1, expandable.getSubItems()); } //remember that this item is now opened (not collapsed) expandable.withIsExpanded(true); //we need to notify to get the correct drawable if there is one showing the current state if (notifyItemChanged) { notifyItemChanged(position); } } } } } //------------------------- //------------------------- //wrap the notify* methods so we can have our required selection adjustment code //------------------------- //------------------------- /** * wraps notifyDataSetChanged */ public void notifyAdapterDataSetChanged() { if (mPositionBasedStateManagement) { mSelections.clear(); mExpanded.clear(); } cacheSizes(); notifyDataSetChanged(); if (mPositionBasedStateManagement) { //we make sure the new items are displayed properly AdapterUtil.handleStates(this, 0, getItemCount() - 1); } } /** * wraps notifyItemInserted * * @param position the global position */ public void notifyAdapterItemInserted(int position) { notifyAdapterItemRangeInserted(position, 1); } /** * wraps notifyItemRangeInserted * * @param position the global position * @param itemCount the count of items inserted */ public void notifyAdapterItemRangeInserted(int position, int itemCount) { //we have to update all current stored selection and expandable states in our map if (mPositionBasedStateManagement) { mSelections = AdapterUtil.adjustPosition(mSelections, position, Integer.MAX_VALUE, itemCount); mExpanded = AdapterUtil.adjustPosition(mExpanded, position, Integer.MAX_VALUE, itemCount); } cacheSizes(); notifyItemRangeInserted(position, itemCount); if (mPositionBasedStateManagement) { //we make sure the new items are 
displayed properly AdapterUtil.handleStates(this, position, position + itemCount - 1); } } /** * wraps notifyItemRemoved * * @param position the global position */ public void notifyAdapterItemRemoved(int position) { notifyAdapterItemRangeRemoved(position, 1); } /** * wraps notifyItemRangeRemoved * * @param position the global position * @param itemCount the count of items removed */ public void notifyAdapterItemRangeRemoved(int position, int itemCount) { //we have to update all current stored selection and expandable states in our map if (mPositionBasedStateManagement) { mSelections = AdapterUtil.adjustPosition(mSelections, position, Integer.MAX_VALUE, itemCount * (-1)); mExpanded = AdapterUtil.adjustPosition(mExpanded, position, Integer.MAX_VALUE, itemCount * (-1)); } cacheSizes(); notifyItemRangeRemoved(position, itemCount); } /** * wraps notifyItemMoved * * @param fromPosition the global fromPosition * @param toPosition the global toPosition */ public void notifyAdapterItemMoved(int fromPosition, int toPosition) { //collapse items we move. 
just in case :D collapse(fromPosition); collapse(toPosition); if (mPositionBasedStateManagement) { if (!mSelections.contains(fromPosition) && mSelections.contains(toPosition)) { mSelections.remove(toPosition); mSelections.add(fromPosition); } else if (mSelections.contains(fromPosition) && !mSelections.contains(toPosition)) { mSelections.remove(fromPosition); mSelections.add(toPosition); } } notifyItemMoved(fromPosition, toPosition); } /** * wraps notifyItemChanged * * @param position the global position */ public void notifyAdapterItemChanged(int position) { notifyAdapterItemChanged(position, null); } /** * wraps notifyItemChanged * * @param position the global position * @param payload additional payload */ public void notifyAdapterItemChanged(int position, Object payload) { notifyAdapterItemRangeChanged(position, 1, payload); } /** * wraps notifyItemRangeChanged * * @param position the global position * @param itemCount the count of items changed */ public void notifyAdapterItemRangeChanged(int position, int itemCount) { notifyAdapterItemRangeChanged(position, itemCount, null); } /** * wraps notifyItemRangeChanged * * @param position the global position * @param itemCount the count of items changed * @param payload an additional payload */ public void notifyAdapterItemRangeChanged(int position, int itemCount, Object payload) { for (int i = position; i < position + itemCount; i++) { if (mPositionBasedStateManagement) { if (mExpanded.indexOfKey(i) >= 0) { collapse(i); } } else { Item item = getItem(position); if (item instanceof IExpandable && ((IExpandable) item).isExpanded()) { collapse(position); } } } if (payload == null) { notifyItemRangeChanged(position, itemCount); } else { notifyItemRangeChanged(position, itemCount, payload); } if (mPositionBasedStateManagement) { //we make sure the new items are displayed properly AdapterUtil.handleStates(this, position, position + itemCount - 1); } } /** * notifies the fastAdapter about new / removed items within a sub 
hierarchy * NOTE this currently only works for sub items with only 1 level * * @param position the global position of the parent item */ public void notifyAdapterSubItemsChanged(int position) { //TODO ALSO CARE ABOUT SUB SUB ... HIRACHIES if (mPositionBasedStateManagement) { //we only need to do something if this item is expanded if (mExpanded.indexOfKey(position) > -1) { int previousCount = mExpanded.get(position); int itemsCount = notifyAdapterSubItemsChanged(position, previousCount); mExpanded.put(position, itemsCount); } } else { Log.e("FastAdapter", "please use the notifyAdapterSubItemsChanged(int position, int previousCount) method instead in the PositionBasedStateManagement mode, as we are not able to calculate the previous count "); } } /** * notifies the fastAdapter about new / removed items within a sub hierarchy * NOTE this currently only works for sub items with only 1 level * * @param position the global position of the parent item * @param previousCount the previous count of sub items * @return the new count of subItems */ public int notifyAdapterSubItemsChanged(int position, int previousCount) { Item item = getItem(position); if (item != null && item instanceof IExpandable) { IExpandable expandable = (IExpandable) item; IAdapter adapter = getAdapter(position); if (adapter != null && adapter instanceof IItemAdapter) { ((IItemAdapter) adapter).removeRange(position + 1, previousCount); ((IItemAdapter) adapter).add(position + 1, expandable.getSubItems()); } return expandable.getSubItems().size(); } return 0; } //listeners public interface OnTouchListener<Item extends IItem> { /** * the onTouch event of a specific item inside the RecyclerView * * @param v the view we clicked * @param event the touch event * @param adapter the adapter which is responsible for the given item * @param item the IItem which was clicked * @param position the global position * @return return true if the event was consumed, otherwise false */ boolean onTouch(View v, MotionEvent 
event, IAdapter<Item> adapter, Item item, int position); } public interface OnClickListener<Item extends IItem> { /** * the onClick event of a specific item inside the RecyclerView * * @param v the view we clicked * @param adapter the adapter which is responsible for the given item * @param item the IItem which was clicked * @param position the global position * @return return true if the event was consumed, otherwise false */ boolean onClick(View v, IAdapter<Item> adapter, Item item, int position); } public interface OnLongClickListener<Item extends IItem> { /** * the onLongClick event of a specific item inside the RecyclerView * * @param v the view we clicked * @param adapter the adapter which is responsible for the given item * @param item the IItem which was clicked * @param position the global position * @return return true if the event was consumed, otherwise false */ boolean onLongClick(View v, IAdapter<Item> adapter, Item item, int position); } public interface OnCreateViewHolderListener { /** * is called inside the onCreateViewHolder method and creates the viewHolder based on the provided viewTyp * * @param parent the parent which will host the View * @param viewType the type of the ViewHolder we want to create * @return the generated ViewHolder based on the given viewType */ RecyclerView.ViewHolder onPreCreateViewHolder(ViewGroup parent, int viewType); /** * is called after the viewHolder was created and the default listeners were added * * @param viewHolder the created viewHolder after all listeners were set * @return the viewHolder given as param */ RecyclerView.ViewHolder onPostCreateViewHolder(RecyclerView.ViewHolder viewHolder); } /** * default implementation of the OnCreateViewHolderListener */ public class OnCreateViewHolderListenerImpl implements OnCreateViewHolderListener { /** * is called inside the onCreateViewHolder method and creates the viewHolder based on the provided viewTyp * * @param parent the parent which will host the View * @param 
viewType the type of the ViewHolder we want to create * @return the generated ViewHolder based on the given viewType */ @Override public RecyclerView.ViewHolder onPreCreateViewHolder(ViewGroup parent, int viewType) { return getTypeInstance(viewType).getViewHolder(parent); } /** * is called after the viewHolder was created and the default listeners were added * * @param viewHolder the created viewHolder after all listeners were set * @return the viewHolder given as param */ @Override public RecyclerView.ViewHolder onPostCreateViewHolder(RecyclerView.ViewHolder viewHolder) { return viewHolder; } } public interface OnBindViewHolderListener { /** * is called in onBindViewHolder to bind the data on the ViewHolder * * @param viewHolder the viewHolder for the type at this position * @param position the position of thsi viewHolder */ void onBindViewHolder(RecyclerView.ViewHolder viewHolder, int position); } public class OnBindViewHolderListenerImpl implements OnBindViewHolderListener { /** * is called in onBindViewHolder to bind the data on the ViewHolder * * @param viewHolder the viewHolder for the type at this position * @param position the position of this viewHolder */ @Override public void onBindViewHolder(RecyclerView.ViewHolder viewHolder, int position) { getItem(position).bindView(viewHolder); } } /** * an internal class to return the IItem and relativePosition and it's adapter at once. used to save one iteration inside the getInternalItem method */ public static class RelativeInfo<Item extends IItem> { public IAdapter<Item> adapter = null; public Item item = null; public int position = -1; } }
* fix getExpandedItemsCount in the new StateManagment
library/src/main/java/com/mikepenz/fastadapter/FastAdapter.java
* fix getExpandedItemsCount in the new StateManagment
<ide><path>ibrary/src/main/java/com/mikepenz/fastadapter/FastAdapter.java <ide> } else { <ide> //first we find out how many items were added in total <ide> //also counting subItems <del> for (int i = position; i < position + totalAddedItems; i++) { <add> for (int i = from; i < position; i++) { <ide> Item tmp = getItem(i); <ide> if (tmp instanceof IExpandable) { <ide> IExpandable tmpExpandable = ((IExpandable) tmp);
JavaScript
mpl-2.0
ab90a3490aa7f39280a74b65710d5482837a59c1
0
Mitch-1-2/uniqtabs,Mitch-1-2/uniqtabs
/* * @author Mitchell Field <[email protected]> * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // Set browser action. browser.browserAction.onClicked.addListener(onBrowserAction); // Set browser action default title. browser.browserAction.setTitle({ title: browser.i18n.getMessage("browser_action_none_label") }); // Set browser action default shortcut description. browser.commands.update({ name: "_execute_browser_action", description: browser.i18n.getMessage("browser_action_shortcut_none_label") }); // Listen to changes in storage. browser.storage.onChanged.addListener(onStorageChanged); const DISCARDABLE_TAB_URLS = new Set([ "about:blank", "about:newtab", "about:privatebrowsing" ]); const SORT_MODES = new Map([ ["none", 0], ["host_title_path", 1], ["host_path_title", 2], ["title_host_path", 3] ]); const DEFAULT_PREFS = { "pref_tabs_deduplicate": "false", "pref_tabs_sort_by_parts": "none", "pref_tabs_sort_by_query_string": "true" }; const PREFS = Object.assign(DEFAULT_PREFS); const processingWindows = new Set(); const hostnameTokenCache = new Map(); const pathnameTokenCache = new Map(); function TabProps(tab) { const { id, index, status, title, url, windowId } = tab; const { protocol = ':', hostname = '', pathname = '/', searchParams = '', hash = '#' } = new URL(url || ''); Object.assign(this, { _lowerDomainTokens: null, _pathnameTokens: null, _tldTokens: null, hasAboutScheme: protocol === "about", hasPathname: pathname !== '/', hash, hostname, id, index, isDiscardable: DISCARDABLE_TAB_URLS.has(url), isDuplicate: false, pathname, queryString: searchParams.toString(), status, tab, title: title || '', url, windowId }); } TabProps.prototype = { get lowerDomainTokens() { if (!this._lowerDomainTokens) { const hostnameTokens = splitHostname(this.hostname, this.windowId); this._tldTokens = hostnameTokens[0]; 
this._lowerDomainTokens = hostnameTokens[1]; } return this._lowerDomainTokens; }, get pathnameTokens() { if (!this._pathnameTokens) { this._pathnameTokens = splitPathname(this.pathname, this.windowId); } return this._pathnameTokens; }, get tldTokens() { if (!this._tldTokens) { const hostnameTokens = splitHostname(this.hostname, this.windowId); this._tldTokens = hostnameTokens[0]; this._lowerDomainTokens = hostnameTokens[1]; } return this._tldTokens; } } /* * Called when the "browser action" is invoked. */ function onBrowserAction(tab, onClickData) { const sort = PREFS.pref_tabs_sort_by_parts !== "none"; const deduplicate = PREFS.pref_tabs_deduplicate === "true"; if (!sort && !deduplicate) { // The browser action is not configured to sort nor deduplicate tabs. browser.runtime.openOptionsPage(); return; } processTabs(tab.windowId, sort, deduplicate); } /* * Called when a storage area is changed. */ function onStorageChanged(changes, areaName) { if (areaName !== "sync" || !("preferences" in changes)) { return; } Object.assign(PREFS, changes.preferences.newValue); let sort = PREFS.pref_tabSortByParts !== "none"; let deduplicate = PREFS.pref_tabDeduplicate === "true"; // Default browser action title. let titleID = "browser_action_none_label"; let shortcutDescriptionID = "browser_action_shortcut_none_label"; if (sort) { if (deduplicate) { // Sort and deduplicate. titleID = "browser_action_sort_deduplicate_label"; shortcutDescriptionID = "browser_action_shortcut_sort_deduplicate_label"; } else { // Sort. titleID = "browser_action_sort_label"; shortcutDescriptionID = "browser_action_shortcut_sort_label"; } } else if (deduplicate) { // Deduplicate. titleID = "browser_action_deduplicate_label"; shortcutDescriptionID = "browser_action_deduplicate_label"; } // Set browser action title. browser.browserAction.setTitle({ title: browser.i18n.getMessage(titleID) }); // Set browser action shortcut description. 
browser.commands.update({ name: "_execute_browser_action", description: browser.i18n.getMessage(shortcutDescriptionID) }); } /* * Sorts, deduplicates, and removes low-priority (or blank) tabs. * * @param windowId window ID * @param sort sort tabs * @param deduplicate deduplicate tabs */ function processTabs(windowId, sort, deduplicate) { let tabPropsArray; const gettingTabs = browser.tabs.query({ pinned: false, windowId, }).then(unpinnedTabs => { // Check if window is already being sorted. if (processingWindows.has(windowId)) return; processingWindows.add(windowId); // Initialise various caches for the window. hostnameTokenCache.set(windowId, new Map()); pathnameTokenCache.set(windowId, new Map()); tabPropsArray = unpinnedTabs.map(unpinnedTab => new TabProps(unpinnedTab)); // Get sorting order for tabs. tabPropsArray.sort(compareTabs); if (!sort) return; // Get first tab index. const {index} = unpinnedTabs[0]; // Move tabs into place. return browser.tabs.move( tabPropsArray.map(tabProps => tabProps.id), { index }); }).then(() => { // Filter duplicate and discardable tabs. const unwantedTabs = tabPropsArray.filter(tabProps => tabProps.status === "complete" && (tabProps.isDiscardable || deduplicate && tabProps.isDuplicate)); // Check for discardable tabs. const hasDiscardableTabs = tabPropsArray.some(tabProps => tabProps.status === "complete" && tabProps.isDiscardable); if (hasDiscardableTabs) { // Create a new tab to remain after culling. const gettingNewTab = browser.tabs.create({ active: false, windowId: windowId }); // Remove tabs. return gettingNewTab.then( browser.tabs.remove(unwantedTabs.map(tab => tab.id))); } else { return browser.tabs.remove(unwantedTabs.map(tab => tab.id)); } }).then(removedTabs => { // Clear the window's caches. hostnameTokenCache.get(windowId).clear(); pathnameTokenCache.get(windowId).clear(); // Allow the window's tabs to be sorted again. processingWindows.delete(windowId); }); } /* * Compares tabs based on certain criteria. 
* * @param propsA first tab properties for comparison * @param propsB second tab properties for comparison * @return comparison numeric result */ function compareTabs(propsA, propsB) { // Map the string preference value to a number. let sortMode = SORT_MODES.get(PREFS.pref_tabSortByParts); let result; if ((result = propsA.hasAboutScheme - propsB.hasAboutScheme) !== 0) return result; if (sortMode === 3) { // title-host-path sorting. Compare titles. if ((result = propsA.title.localeCompare(propsB.title)) !== 0) return result; } // Compare hostnames. if ((result = compareTokens(propsA.lowerDomainTokens, propsB.lowerDomainTokens)) !== 0) return result; // Compare TLDs. if ((result = compareTokens(propsA.tldTokens, propsB.tldTokens)) !== 0) return result; // Compare pathlessness. if ((result = propsA.hasPathname - propsB.hasPathname) !== 0) return result; if (sortMode === 1) { // host-title-path sorting. Compare titles. if ((result = propsA.title.localeCompare(propsB.title)) !== 0) return result; } // Compare pathnames. result = compareTokens(propsA.pathnameTokens, propsB.pathnameTokens); if (result !== 0) return result; // Compare query strings. if (PREFS.pref_tabs_sort_by_query_string) { if ((result = propsA.queryString.localeCompare(propsB.queryString)) !== 0) return result; } // Compare hashes (fragments). if ((result = propsA.hash.localeCompare(propsB.hash)) !== 0) return result; if (sortMode === 2) { // host-path-title. Compare titles. if ((result = propsA.title.localeCompare(propsB.title)) !== 0) return result; } // The two tabs are considered duplicate. Mark the later tab as a duplicate. if (propsA.index < propsB.index) { propsB.isDuplicate = true; } else { propsA.isDuplicate = true; } return 0; } /* * Compares a list of tokens. * * Compare a token from one array to its corresponding token in the other array * until they sort differently or an array runs out of tokens to compare. 
* * @param tokensA first array of tokens for comparison * @param tokensB second array of tokens for comparison * @return comparison numeric result */ function compareTokens(tokensA, tokensB) { const tokensLengthA = tokensA.length; const tokensLengthB = tokensB.length; const shortestLength = Math.min(tokensLengthA, tokensLengthB); let result; for (let index = 0; index < shortestLength; ++index) { if ((result = tokensA[index].localeCompare(tokensB[index])) !== 0) break; } return result === 0 ? Math.sign(tokensLengthA - tokensLengthB) : result; } /* * Roughly split hostname into top-level and lower-level domains. * * Will set/get cache entries by hostname and window ID. * * @param hostname URL hostname * @param windowId ID of associated window * @return [top-level domain tokens, lower-level domain tokens] */ function splitHostname(hostname, windowId) { const tokensMap = hostnameTokenCache.get(windowId); let hostnameTokens = tokensMap.get(hostname); if (hostnameTokens) return hostnameTokens; const tokens = hostname.split('.').reverse(); const tokensLength = tokens.length; let splitIndex; if (tokensLength > 2) { splitIndex = tokens[1].length <= 3 ? 2 : 1; } else { splitIndex = 1; } hostnameTokens = [tokens.slice(0, splitIndex), tokens.slice(splitIndex)]; tokensMap.set(hostname, hostnameTokens); return hostnameTokens; } /* * Split pathname into tokens. * * Will set/get cache entries by pathname and window ID. * * @param hostname URL pathname * @param windowId ID of associated window * @return [pathname tokens] */ function splitPathname(pathname, windowId) { const tokensMap = pathnameTokenCache.get(windowId); let pathnameTokens = tokensMap.get(pathname); if (pathnameTokens) return pathnameTokens; pathnameTokens = pathname.split('/'); tokensMap.set(pathname, pathnameTokens); return pathnameTokens; }
webextension/background.js
/* * @author Mitchell Field <[email protected]> * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // Set browser action. browser.browserAction.onClicked.addListener(onBrowserAction); // Set browser action default title. browser.browserAction.setTitle({ title: browser.i18n.getMessage("browser_action_none_label") }); // Set browser action default shortcut description. browser.commands.update({ name: "_execute_browser_action", description: browser.i18n.getMessage("browser_action_shortcut_none_label") }); // Listen to changes in storage. browser.storage.onChanged.addListener(onStorageChanged); const DISCARDABLE_TAB_URLS = new Set([ "about:blank", "about:newtab", "about:privatebrowsing" ]); const SORT_MODES = new Map([ ["none", 0], ["host_title_path", 1], ["host_path_title", 2], ["title_host_path", 3] ]); const DEFAULT_PREFS = { "pref_tabs_deduplicate": "false", "pref_tabs_sort_by_parts": "none", "pref_tabs_sort_by_query_string": "true" }; const PREFS = Object.assign(DEFAULT_PREFS); const processingWindows = new Set(); const hostnameTokenCache = new Map(); const pathnameTokenCache = new Map(); function TabProps(tab) { const { id, index, status, title, url, windowId } = tab; const { protocol = ':', hostname = '', pathname = '/', searchParams = '', hash = '#' } = new URL(url || ''); Object.assign(this, { _lowerDomainTokens: null, _pathnameTokens: null, _tldTokens: null, hasAboutScheme: protocol === "about", hasPathname: pathname !== '/', hash, hostname, id, index, isDiscardable: DISCARDABLE_TAB_URLS.has(url), isDuplicate: false, pathname, queryString: searchParams.toString(), status, tab, title: title || '', url, windowId }); } TabProps.prototype = { get lowerDomainTokens() { if (!this._lowerDomainTokens) { const hostnameTokens = splitHostname(this.hostname, this.windowId); this._tldTokens = hostnameTokens[0]; 
this._lowerDomainTokens = hostnameTokens[1]; } return this._lowerDomainTokens; }, get pathnameTokens() { if (!this._pathnameTokens) { this._pathnameTokens = splitPathname(this.pathname, this.windowId); } return this._pathnameTokens; }, get tldTokens() { if (!this._tldTokens) { const hostnameTokens = splitHostname(this.hostname, this.windowId); this._tldTokens = hostnameTokens[0]; this._lowerDomainTokens = hostnameTokens[1]; } return this._tldTokens; } } /* * Called when the "browser action" is invoked. */ function onBrowserAction(tab, onClickData) { const sort = PREFS.pref_tabs_sort_by_parts !== "none"; const deduplicate = PREFS.pref_tabs_deduplicate === "true"; if (!sort && !deduplicate) { // The browser action is not configured to sort nor deduplicate tabs. browser.runtime.openOptionsPage(); return; } processTabs(tab.windowId, sort, deduplicate); } /* * Called when a storage area is changed. */ function onStorageChanged(changes, areaName) { if (areaName !== "sync" || !("preferences" in changes)) { return; } Object.assign(PREFS, changes.preferences.newValue); let sort = PREFS.pref_tabSortByParts !== "none"; let deduplicate = PREFS.pref_tabDeduplicate === "true"; // Default browser action title. let titleID = "browser_action_none_label"; let shortcutDescriptionID = "browser_action_shortcut_none_label"; if (sort) { if (deduplicate) { // Sort and deduplicate. titleID = "browser_action_sort_deduplicate_label"; shortcutDescriptionID = "browser_action_shortcut_sort_deduplicate_label"; } else { // Sort. titleID = "browser_action_sort_label"; shortcutDescriptionID = "browser_action_shortcut_sort_label"; } } else if (deduplicate) { // Deduplicate. titleID = "browser_action_deduplicate_label"; shortcutDescriptionID = "browser_action_deduplicate_label"; } // Set browser action title. browser.browserAction.setTitle({ title: browser.i18n.getMessage(titleID) }); // Set browser action shortcut description. 
browser.commands.update({ name: "_execute_browser_action", description: browser.i18n.getMessage(shortcutDescriptionID) }); } /* * Sorts, deduplicates, and removes low-priority (or blank) tabs. * * @param windowId window ID * @param sort sort tabs * @param deduplicate deduplicate tabs */ function processTabs(windowId, sort, deduplicate) { let tabPropsArray; const gettingTabs = browser.tabs.query({ pinned: false, windowId, }).then(unpinnedTabs => { // Check if window is already being sorted. if (processingWindows.has(windowId)) return; processingWindows.add(windowId); // Initialise various caches for the window. hostnameTokenCache.set(windowId, new Map()); pathnameTokenCache.set(windowId, new Map()); tabPropsArray = unpinnedTabs.map(unpinnedTab => new TabProps(unpinnedTab)); // Get sorting order for tabs. tabPropsArray.sort(compareTabs); if (!sort) return; // Get first tab index. const {index} = unpinnedTabs[0]; // Move tabs into place. return browser.tabs.move( tabPropsArray.map(tabProps => tabProps.id), { index }); }).then(() => { // Filter duplicate and discardable tabs. const unwantedTabs = tabPropsArray.filter(tabProps => tabProps.status === "complete" && (tabProps.isDiscardable || deduplicate && tabProps.isDuplicate)); // Check for discardable tabs. const hasDiscardableTabs = tabPropsArray.some(tabProps => tabProps.status === "complete" && tabProps.isDiscardable); if (hasDiscardableTabs) { // Create a new tab to remain after culling. const gettingNewTab = browser.tabs.create({ active: false, windowId: windowId }); // Remove tabs. return gettingNewTab.then( browser.tabs.remove(unwantedTabs.map(tab => tab.id))); } else { return browser.tabs.remove(unwantedTabs.map(tab => tab.id)); } }).then(removedTabs => { // Clear the window's caches. hostnameTokenCache.get(windowId).clear(); pathnameTokenCache.get(windowId).clear(); // Allow the window's tabs to be sorted again. processingWindows.delete(windowId); }); } /* * Compares tabs based on certain criteria. 
* * @param propsA first tab properties for comparison * @param propsB second tab properties for comparison * @return comparison numeric result */ function compareTabs(propsA, propsB) { // Map the string preference value to a number. let sortMode = SORT_MODES.get(PREFS.pref_tabSortByParts); let result; if ((result = propsA.hasAboutScheme - propsB.hasAboutScheme) !== 0) return result; if (sortMode === 3) { // title-host-path sorting. Compare titles. if ((result = propsA.title.localeCompare(propsB.title)) !== 0) return result; } // Compare hostnames. if ((result = compareTokens(propsA.lowerDomainTokens, propsB.lowerDomainTokens)) !== 0) return result; // Compare TLDs. if ((result = compareTokens(propsA.tldTokens, propsB.tldTokens)) !== 0) return result; // Compare pathlessness. if ((result = propsA.hasPathname - propsB.hasPathname) !== 0) return result; if (sortMode === 1) { // host-title-path sorting. Compare titles. if ((result = propsA.title.localeCompare(propsB.title)) !== 0) return result; } // Compare pathnames. result = compareTokens(propsA.pathnameTokens, propsB.pathnameTokens); if (result !== 0) return result; // Compare query strings. if (PREFS.pref_tabs_sort_by_query_string) { if ((result = propsA.queryString.localeCompare(propsB.queryString)) !== 0) return result; } // Compare hashes (fragments). if ((result = propsA.hash.localeCompare(propsB.hash)) !== 0) return result; if (sortMode === 2) { // host-path-title. Compare titles. if ((result = propsA.title.localeCompare(propsB.title)) !== 0) return result; } // The two tabs are considered duplicate. Mark the later tab as a duplicate. if (propsA.index < propsB.index) { propsB.isDuplicate = true; } else { propsA.isDuplicate = true; } return 0; } /* * Compares a list of tokens. * * Compare a token from one array to its corresponding token in the other array * until they sort differently or an array runs out of tokens to compare. 
* * @param tokensA first array of tokens for comparison * @param tokensB second array of tokens for comparison * @return comparison numeric result */ function compareTokens(tokensA, tokensB) { const tokensALength = tokensA.length; const tokensBLength = tokensB.length; for (let tokenIndex = 0; ; ++tokenIndex) { const isEndedA = tokenIndex >= tokensALength; // End of 'A' tokens. const isEndedB = tokenIndex >= tokensBLength; // End of 'B' tokens. if (isEndedA || isEndedB) return isEndedB - isEndedA; const tokenA = tokensA[tokenIndex]; const tokenB = tokensB[tokenIndex]; const result = tokenA.localeCompare(tokenB); if (result !== 0) return result; } return 0; } /* * Roughly split hostname into top-level and lower-level domains. * * Will set/get cache entries by hostname and window ID. * * @param hostname URL hostname * @param windowId ID of associated window * @return [top-level domain tokens, lower-level domain tokens] */ function splitHostname(hostname, windowId) { const tokensMap = hostnameTokenCache.get(windowId); let hostnameTokens = tokensMap.get(hostname); if (hostnameTokens) return hostnameTokens; const tokens = hostname.split('.').reverse(); const tokensLength = tokens.length; let splitIndex; if (tokensLength > 2) { splitIndex = tokens[1].length <= 3 ? 2 : 1; } else { splitIndex = 1; } hostnameTokens = [tokens.slice(0, splitIndex), tokens.slice(splitIndex)]; tokensMap.set(hostname, hostnameTokens); return hostnameTokens; } /* * Split pathname into tokens. * * Will set/get cache entries by pathname and window ID. * * @param hostname URL pathname * @param windowId ID of associated window * @return [pathname tokens] */ function splitPathname(pathname, windowId) { const tokensMap = pathnameTokenCache.get(windowId); let pathnameTokens = tokensMap.get(pathname); if (pathnameTokens) return pathnameTokens; pathnameTokens = pathname.split('/'); tokensMap.set(pathname, pathnameTokens); return pathnameTokens; }
Optimise compareTokens().
webextension/background.js
Optimise compareTokens().
<ide><path>ebextension/background.js <ide> */ <ide> function compareTokens(tokensA, tokensB) { <ide> <del> const tokensALength = tokensA.length; <del> const tokensBLength = tokensB.length; <del> <del> for (let tokenIndex = 0; ; ++tokenIndex) { <del> const isEndedA = tokenIndex >= tokensALength; // End of 'A' tokens. <del> const isEndedB = tokenIndex >= tokensBLength; // End of 'B' tokens. <del> <del> if (isEndedA || isEndedB) <del> return isEndedB - isEndedA; <del> <del> const tokenA = tokensA[tokenIndex]; <del> const tokenB = tokensB[tokenIndex]; <del> const result = tokenA.localeCompare(tokenB); <del> <del> if (result !== 0) <del> return result; <del> } <del> <del> return 0; <add> const tokensLengthA = tokensA.length; <add> const tokensLengthB = tokensB.length; <add> const shortestLength = Math.min(tokensLengthA, tokensLengthB); <add> let result; <add> <add> for (let index = 0; index < shortestLength; ++index) { <add> if ((result = tokensA[index].localeCompare(tokensB[index])) !== 0) <add> break; <add> } <add> <add> return result === 0 ? Math.sign(tokensLengthA - tokensLengthB) : result; <ide> } <ide> <ide>
Java
bsd-3-clause
53f2fabe1d88872d9c959cd2bbf9e2a8ca953be4
0
msf-oca-his/dhis2-core,hispindia/dhis2-Core,hispindia/dhis2-Core,msf-oca-his/dhis2-core,msf-oca-his/dhis2-core,msf-oca-his/dhis2-core,hispindia/dhis2-Core,hispindia/dhis2-Core,dhis2/dhis2-core,dhis2/dhis2-core,dhis2/dhis2-core,hispindia/dhis2-Core,dhis2/dhis2-core,dhis2/dhis2-core,msf-oca-his/dhis2-core
package org.hisp.dhis.webapi.utils; /* * Copyright (c) 2004-2020, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import org.hisp.dhis.analytics.DataQueryParams; import org.hisp.dhis.common.cache.CacheStrategy; import org.hisp.dhis.common.cache.Cacheability; import org.hisp.dhis.setting.SettingKey; import org.hisp.dhis.setting.SystemSettingManager; import org.hisp.dhis.webapi.DhisWebSpringTest; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.mock.web.MockHttpServletResponse; import javax.servlet.http.HttpServletResponse; import java.util.Calendar; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; /** * @author Stian Sandvold */ public class ContextUtilsTest extends DhisWebSpringTest { @Autowired private ContextUtils contextUtils; @Autowired private SystemSettingManager systemSettingManager; private HttpServletResponse response; @Before public void init() { response = new MockHttpServletResponse(); } @Test public void testConfigureResponseReturnsCorrectTypeAndNumberOfHeaders() { contextUtils.configureResponse( response, null, CacheStrategy.NO_CACHE, null, false ); String cacheControl = response.getHeader( "Cache-Control" ); // Make sure we just have 1 header: Cache-Control assertEquals( 1, response.getHeaderNames().size() ); assertNotNull( cacheControl ); } @Test public void testConfigureResponseReturnsCorrectHeaderValueForAllCacheStrategies() { contextUtils.configureResponse( response, null, CacheStrategy.NO_CACHE, null, false ); assertEquals( "no-cache", response.getHeader( "Cache-Control" ) ); response.reset(); contextUtils.configureResponse( response, null, CacheStrategy.CACHE_1_HOUR, null, false ); assertEquals( "max-age=3600, public", response.getHeader( "Cache-Control" ) ); response.reset(); contextUtils.configureResponse( response, null, CacheStrategy.CACHE_15_MINUTES, null, false ); assertEquals( "max-age=900, public", response.getHeader( "Cache-Control" ) ); response.reset(); contextUtils.configureResponse( 
response, null, CacheStrategy.CACHE_TWO_WEEKS, null, false ); assertEquals( "max-age=1209600, public", response.getHeader( "Cache-Control" ) ); systemSettingManager.saveSystemSetting( SettingKey.CACHE_STRATEGY, SettingKey.getAsRealClass( SettingKey.CACHE_STRATEGY.getName(), CacheStrategy.CACHE_1_HOUR.toString() ) ); response.reset(); contextUtils.configureResponse( response, null, CacheStrategy.RESPECT_SYSTEM_SETTING, null, false ); assertEquals( "max-age=3600, public", response.getHeader( "Cache-Control" ) ); } @Test public void testConfigureResponseReturnsCorrectCacheabilityInHeader() { // Set to public; is default systemSettingManager.saveSystemSetting( SettingKey.CACHEABILITY, Cacheability.PUBLIC ); contextUtils.configureResponse( response, null, CacheStrategy.CACHE_1_HOUR, null, false ); assertEquals( "max-age=3600, public", response.getHeader( "Cache-Control" ) ); // Set to private systemSettingManager.saveSystemSetting( SettingKey.CACHEABILITY, Cacheability.PRIVATE ); response.reset(); contextUtils.configureResponse( response, null, CacheStrategy.CACHE_1_HOUR, null, false ); assertEquals( "max-age=3600, private", response.getHeader( "Cache-Control" ) ); } @Test public void testConfigureAnalyticsResponseReturnsCorrectCacheHeaders() { Calendar thisYear = Calendar.getInstance(); Calendar fiveYearBack = Calendar.getInstance(); fiveYearBack.add( Calendar.YEAR, -5 ); DataQueryParams withinThreshold = DataQueryParams.newBuilder().withEndDate( thisYear.getTime() ).build(); DataQueryParams outsideThreshold = DataQueryParams.newBuilder().withEndDate( fiveYearBack.getTime() ).build(); systemSettingManager.saveSystemSetting( SettingKey.CACHE_ANALYTICS_DATA_YEAR_THRESHOLD, 3 ); response.reset(); contextUtils.configureAnalyticsResponse( response, null, CacheStrategy.CACHE_1_HOUR, null, false, withinThreshold.getLatestEndDate() ); assertEquals( "no-cache", response.getHeader( "Cache-Control" ) ); response.reset(); contextUtils.configureAnalyticsResponse( response, null, 
CacheStrategy.CACHE_1_HOUR, null, false, outsideThreshold.getLatestEndDate() ); assertEquals( "max-age=3600, public", response.getHeader( "Cache-Control" ) ); } @Test public void testGetAttachmentFileNameNull() { Assert.assertNull( ContextUtils.getAttachmentFileName( null ) ); } @Test public void testGetAttachmentFileNameInline() { Assert.assertNull( ContextUtils.getAttachmentFileName( "inline; filename=test.txt" ) ); } @Test public void testGetAttachmentFileName() { Assert.assertEquals( "test.txt", ContextUtils.getAttachmentFileName( "attachment; filename=test.txt" ) ); } }
dhis-2/dhis-web/dhis-web-api-test/src/test/java/org/hisp/dhis/webapi/utils/ContextUtilsTest.java
package org.hisp.dhis.webapi.utils; /* * Copyright (c) 2004-2020, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import org.hisp.dhis.analytics.DataQueryParams; import org.hisp.dhis.common.cache.CacheStrategy; import org.hisp.dhis.common.cache.Cacheability; import org.hisp.dhis.setting.SettingKey; import org.hisp.dhis.setting.SystemSettingManager; import org.hisp.dhis.webapi.DhisWebSpringTest; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.mock.web.MockHttpServletResponse; import javax.servlet.http.HttpServletResponse; import java.util.Calendar; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; /** * @author Stian Sandvold */ public class ContextUtilsTest extends DhisWebSpringTest { @Autowired private ContextUtils contextUtils; @Autowired private SystemSettingManager systemSettingManager; private HttpServletResponse response; @Before public void init() { response = new MockHttpServletResponse(); } @Test public void testConfigureResponseReturnsCorrectTypeAndNumberOfHeaders() { contextUtils.configureResponse( response, null, CacheStrategy.NO_CACHE, null, false ); String cacheControl = response.getHeader( "Cache-Control" ); // Make sure we just have 1 header: Cache-Control assertEquals( 1, response.getHeaderNames().size() ); assertNotNull( cacheControl ); } @Test public void testConfigureResponseReturnsCorrectHeaderValueForAllCacheStrategies() { contextUtils.configureResponse( response, null, CacheStrategy.NO_CACHE, null, false ); assertEquals( "no-cache", response.getHeader( "Cache-Control" ) ); response.reset(); contextUtils.configureResponse( response, null, CacheStrategy.CACHE_1_HOUR, null, false ); assertEquals( "max-age=3600, public", response.getHeader( "Cache-Control" ) ); response.reset(); contextUtils.configureResponse( response, null, CacheStrategy.CACHE_15_MINUTES, null, false ); assertEquals( "max-age=900, public", response.getHeader( "Cache-Control" ) ); response.reset(); contextUtils.configureResponse( 
response, null, CacheStrategy.CACHE_TWO_WEEKS, null, false ); assertEquals( "max-age=1209600, public", response.getHeader( "Cache-Control" ) ); systemSettingManager.saveSystemSetting( SettingKey.CACHE_STRATEGY, SettingKey.getAsRealClass( SettingKey.CACHE_STRATEGY.getName(), CacheStrategy.CACHE_1_HOUR.toString() ) ); response.reset(); contextUtils.configureResponse( response, null, CacheStrategy.RESPECT_SYSTEM_SETTING, null, false ); assertEquals( "max-age=3600, public", response.getHeader( "Cache-Control" ) ); } @Test public void testConfigureResponseReturnsCorrectCacheabilityInHeader() { // Set to public; is default systemSettingManager.saveSystemSetting( SettingKey.CACHEABILITY, Cacheability.PUBLIC ); contextUtils.configureResponse( response, null, CacheStrategy.CACHE_1_HOUR, null, false ); assertEquals( "max-age=3600, public", response.getHeader( "Cache-Control" ) ); // Set to private systemSettingManager.saveSystemSetting( SettingKey.CACHEABILITY, Cacheability.PRIVATE ); response.reset(); contextUtils.configureResponse( response, null, CacheStrategy.CACHE_1_HOUR, null, false ); assertEquals( "max-age=3600, private", response.getHeader( "Cache-Control" ) ); } @Test public void testConfigureAnalyticsResponseReturnsCorrectCacheHeaders() { Calendar thisYear = Calendar.getInstance(); Calendar fiveYearBack = Calendar.getInstance(); thisYear.set( 2017, 01, 01 ); fiveYearBack.set( 2012, 01, 01 ); DataQueryParams withinThreshold = DataQueryParams.newBuilder().withEndDate( thisYear.getTime() ).build(); DataQueryParams outsideThreshold = DataQueryParams.newBuilder().withEndDate( fiveYearBack.getTime() ).build(); systemSettingManager.saveSystemSetting( SettingKey.CACHE_ANALYTICS_DATA_YEAR_THRESHOLD, 3 ); response.reset(); contextUtils.configureAnalyticsResponse( response, null, CacheStrategy.CACHE_1_HOUR, null, false, withinThreshold.getLatestEndDate() ); assertEquals( "no-cache", response.getHeader( "Cache-Control" ) ); response.reset(); 
contextUtils.configureAnalyticsResponse( response, null, CacheStrategy.CACHE_1_HOUR, null, false, outsideThreshold.getLatestEndDate() ); assertEquals( "max-age=3600, public", response.getHeader( "Cache-Control" ) ); } @Test public void testGetAttachmentFileNameNull() { Assert.assertNull( ContextUtils.getAttachmentFileName( null ) ); } @Test public void testGetAttachmentFileNameInline() { Assert.assertNull( ContextUtils.getAttachmentFileName( "inline; filename=test.txt" ) ); } @Test public void testGetAttachmentFileName() { Assert.assertEquals( "test.txt", ContextUtils.getAttachmentFileName( "attachment; filename=test.txt" ) ); } }
fix: ContextUtilTest to use dynamic date assignment (#4799)
dhis-2/dhis-web/dhis-web-api-test/src/test/java/org/hisp/dhis/webapi/utils/ContextUtilsTest.java
fix: ContextUtilTest to use dynamic date assignment (#4799)
<ide><path>his-2/dhis-web/dhis-web-api-test/src/test/java/org/hisp/dhis/webapi/utils/ContextUtilsTest.java <ide> Calendar thisYear = Calendar.getInstance(); <ide> Calendar fiveYearBack = Calendar.getInstance(); <ide> <del> thisYear.set( 2017, 01, 01 ); <del> fiveYearBack.set( 2012, 01, 01 ); <add> fiveYearBack.add( Calendar.YEAR, -5 ); <ide> <ide> DataQueryParams withinThreshold = DataQueryParams.newBuilder().withEndDate( thisYear.getTime() ).build(); <ide> DataQueryParams outsideThreshold = DataQueryParams.newBuilder().withEndDate( fiveYearBack.getTime() ).build();
Java
bsd-3-clause
699000a3083325e0926957b838597852deb161e4
0
lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon
/* * $Id: Tdb.java,v 1.12 2011-01-07 19:53:06 pgust Exp $ */ /* Copyright (c) 2000-2010 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: n The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.config; import java.io.*; import java.util.*; import org.lockss.util.*; /** * This class represents a title database (TDB). The TDB consists of * hierarchy of <code>TdbPublisher</code>s, and <code>TdbAu</code>s. * Special indexing provides fast access to all <code>TdbAu</code>s for * a specified plugin ID. 
* * @author Philip Gust * @version $Id: Tdb.java,v 1.12 2011-01-07 19:53:06 pgust Exp $ */ public class Tdb { /** * This exception is thrown by Tdb related classes in place of an * unchecked IllegalStateException when an operation cannot be * performed because it is incompatible with state of the Tdb. * <p> * This class inherits from IOException to avoid having higher * level routines that already have to handle IOException when * creating and copying Configuration objects from having to * also handle this exception. * * @author Philip Gust * @version $Id: Tdb.java,v 1.12 2011-01-07 19:53:06 pgust Exp $ */ @SuppressWarnings("serial") static public class TdbException extends Exception { /** * Constructs a new exception with the specified detail message. The * cause is not initialized, and may subsequently be initialized by * a call to {@link #initCause}. * * @param message the detail message. The detail message is saved for * later retrieval by the {@link #getMessage()} method. */ public TdbException(String message) { super(message); } /** * Constructs a new exception with the specified detail message and * cause. <p>Note that the detail message associated with * <code>cause</code> is <i>not</i> automatically incorporated in * this exception's detail message. * * @param message the detail message (which is saved for later retrieval * by the {@link #getMessage()} method). * @param cause the cause (which is saved for later retrieval by the * {@link #getCause()} method). (A <tt>null</tt> value is * permitted, and indicates that the cause is nonexistent or * unknown.) * @since 1.4 */ public TdbException(String message, Throwable cause) { super(message, cause); } /** * Constructs a new exception with the specified cause and a detail * message of <tt>(cause==null ? null : cause.toString())</tt> (which * typically contains the class and detail message of <tt>cause</tt>). 
* This constructor is useful for exceptions that are little more than * wrappers for other throwables (for example, {@link * java.security.PrivilegedActionException}). * * @param cause the cause (which is saved for later retrieval by the * {@link #getCause()} method). (A <tt>null</tt> value is * permitted, and indicates that the cause is nonexistent or * unknown.) * @since 1.4 */ public TdbException(Throwable cause) { super(cause); } } /** * Register the au with this Tdb for its plugin. * * @param au the TdbAu * @return <code>false</code> if already registered, otherwise <code>true</code> */ private boolean addTdbAuForPlugin(TdbAu au) { // add AU to list for plugins String pluginId = au.getPluginId(); Collection<TdbAu> aus = pluginIdTdbAusMap.get(pluginId); if (aus == null) { aus = new HashSet<TdbAu>(); pluginIdTdbAusMap.put(pluginId, aus); } if (!aus.add(au)) { return false; } // increment the total AU count; tdbAuCount++; return true; } /** * Unregister the au with this Tdb for its plugin. * * @param au the TdbAu * @return <code>false</code> if au was not registered, otherwise <code>true</code> */ private boolean removeTdbAuForPlugin(TdbAu au) { // if can't add au to title, we need to undo the au // registration and re-throw the exception we just caught String pluginId = au.getPluginId(); Collection<TdbAu> c = pluginIdTdbAusMap.get(pluginId); if (c.remove(au)) { if (c.isEmpty()) { pluginIdTdbAusMap.remove(c); } tdbAuCount--; return true; } return false; } /** * Add a new TdbAu to this title database. The TdbAu must have * its pluginID, and title set. The TdbAu''s title must also have * its titleId and publisher set. The publisher name must be unique * to all publishers in this Tdb. * * @param au the TdbAu to add. 
* @throws TdbException if Tdb is sealed, this is a duplicate au, or * the au's publisher is a duplicate */ public void addTdbAu(TdbAu au) throws TdbException { if (au == null) { throw new IllegalArgumentException("TdbAu cannot be null"); } // verify not sealed if (isSealed()) { throw new TdbException("Cannot add TdbAu to sealed Tdb"); } // validate title TdbTitle title = au.getTdbTitle(); if (title == null) { throw new IllegalArgumentException("TdbAu's title not set"); } // validate publisher TdbPublisher publisher = title.getTdbPublisher(); if (publisher == null) { throw new IllegalArgumentException("TdbAu's publisher not set"); } // make sure publisher is not a duplicate String pubName = publisher.getName(); TdbPublisher oldPublisher = tdbPublisherMap.put(pubName, publisher); if ((oldPublisher != null) && (oldPublisher != publisher)) { // restore old publisher and report error tdbPublisherMap.put(pubName, oldPublisher); throw new TdbException("New au publisher with duplicate name: " + pubName); } // register the au with this instance if (!addTdbAuForPlugin(au)) { // remove new publisher and report error if (oldPublisher == null) { tdbPublisherMap.remove(pubName); } throw new TdbException("Cannot register au " + au.getName()); } } /** * Set up logger */ protected final static Logger logger = Logger.getLogger("Tdb"); /** * A map of AUs per plugin, for this configuration * (provides faster access for Plugins) */ private final Map<String, Collection<TdbAu>> pluginIdTdbAusMap = new HashMap<String,Collection<TdbAu>>(); /** * Map of publisher names to TdBPublishers for this configuration */ private final Map<String, TdbPublisher> tdbPublisherMap = new HashMap<String,TdbPublisher>(); /** * Determines whether more AUs can be added. 
*/ private boolean isSealed = false; /** * The total number of TdbAus in this TDB (sum of collections in pluginIdTdbAus map */ private int tdbAuCount = 0; /** * Prefix appended to generated unknown title */ private static final String UNKNOWN_TITLE_PREFIX = "Title of "; /** * Prefix appended to generated unknown publisher */ private static final String UNKNOWN_PUBLISHER_PREFIX = "Publisher of "; /** * Seals a Tdb against further additions. */ public void seal() { if (!isSealed) { isSealed = true; // convert map values to array lists to save space because // they will not be modified now that the Tdb is sealed. synchronized(pluginIdTdbAusMap) { for (Map.Entry<String, Collection<TdbAu>> entry : pluginIdTdbAusMap.entrySet()) { ArrayList<TdbAu> list = new ArrayList<TdbAu>(entry.getValue()); list.trimToSize(); entry.setValue(list); } } } } /** * Determines whether this Tdb is sealed. * * @return <code>true</code> if sealed */ public boolean isSealed() { return isSealed; } /** * Determines whether the title database is empty. * * @return <code> true</code> if the title database has no entries */ public boolean isEmpty() { return pluginIdTdbAusMap.isEmpty(); } /** * Returns a collection of pluginIds for TdbAus that are * different from those in this Tdb. * * @param otherTdb a Tdb * @return a collection of pluginIds that are different, * , or all plugin Ids in this Tdb if otherTdb is <code>null</code> */ public Set<String> getPluginIdsForDifferences(Tdb otherTdb) { if (otherTdb == null) { return pluginIdTdbAusMap.keySet(); } if (otherTdb == this) { return Collections.emptySet(); } Set<String> pluginIds = new HashSet<String>(); addPluginIdsForDifferences (pluginIds, otherTdb); return pluginIds; } /** * Adds a collection of pluginIds for TdbAus that are * different from those in this Tdb. 
   *
   * @param pluginIds the set of pluginIds
   * @param otherTdb a Tdb
   */
  private void addPluginIdsForDifferences(Set<String> pluginIds, Tdb otherTdb) {
    Map<String, TdbPublisher> tdbPublishers = otherTdb.getAllTdbPublishers();
    // pass 1: publishers present in otherTdb but absent here contribute
    // all of their plugin IDs
    for (TdbPublisher tdbPublisher : tdbPublishers.values()) {
      if (!this.tdbPublisherMap.containsKey(tdbPublisher.getName())) {
        // add pluginIds for publishers in tdb that are not in this Tdb
        tdbPublisher.addAllPluginIds(pluginIds);
      }
    }
    // pass 2: publishers present here are either absent from otherTdb
    // (contribute everything) or present in both (compared recursively)
    for (TdbPublisher thisPublisher : tdbPublisherMap.values()) {
      TdbPublisher tdbPublisher = tdbPublishers.get(thisPublisher.getName());
      if (tdbPublisher == null) {
        // add pluginIds for publisher in this Tdb that is not in tdb
        thisPublisher.addAllPluginIds(pluginIds);
      } else {
        // add pluginIds for publishers in both Tdbs that are different
        thisPublisher.addPluginIdsForDifferences(pluginIds, tdbPublisher);
      }
    }
  }

  /**
   * Determines two Tdbs are equal.  Equality is based on having
   * equal TdbPublishers, and their child TdbTitles and TdbAus.
   *
   * @param o the other object
   * @return <code>true</code> iff they are equal Tdbs
   */
  public boolean equals(Object o) {
    // check for identity
    if (this == o) {
      return true;
    }
    if (o instanceof Tdb) {
      try {
        // Probe trick: the immutable empty set is passed as the
        // accumulator; any difference makes the diff walk attempt an
        // add, which throws UnsupportedOperationException.
        // if no exception thrown, there are no differences
        // because the method did not try to modify the set
        addPluginIdsForDifferences(Collections.<String>emptySet(), (Tdb)o);
        return true;
      } catch (UnsupportedOperationException ex) {
        // differences because method tried to add to unmodifiable set
      } catch (IllegalArgumentException ex) {
        // if something was wrong with the other Tdb
      } catch (IllegalStateException ex) {
        // if something is wrong with this Tdb
      }
    }
    return false;
  }

  /**
   * Not supported for this class.
   * <p>
   * NOTE(review): equals() is overridden while hashCode() deliberately
   * throws, so Tdb instances must never be used as keys in hash-based
   * collections.
   *
   * @throws UnsupportedOperationException
   */
  public int hashCode() {
    throw new UnsupportedOperationException();
  }

  /**
   * Merge other Tdb into this one. Makes copies of otherTdb's non-duplicate
   * TdbPublisher, TdbTitle, and TdbAu objects and their non-duplicate children.
* The object themselves are not merged. * * @param otherTdb the other Tdb * @throws TdbException if Tdb is sealed */ public void copyFrom(Tdb otherTdb) throws TdbException { // ignore inappropriate Tdb values if ((otherTdb == null) || (otherTdb == this)) { return; } if (isSealed()) { throw new TdbException("Cannot add otherTdb AUs to sealed Tdb"); } // merge non-duplicate publishers of otherTdb boolean tdbIsNew = tdbPublisherMap.isEmpty(); for (TdbPublisher otherPublisher : otherTdb.getAllTdbPublishers().values()) { String pubName = otherPublisher.getName(); TdbPublisher thisPublisher; boolean publisherIsNew = true; if (tdbIsNew) { // no need to check for existing publisher if TDB is new thisPublisher = new TdbPublisher(pubName); tdbPublisherMap.put(pubName, thisPublisher); } else { thisPublisher = tdbPublisherMap.get(pubName); publisherIsNew = (thisPublisher == null); if (publisherIsNew) { // copy publisher if not present in this Tdb thisPublisher = new TdbPublisher(pubName); tdbPublisherMap.put(pubName, thisPublisher); } } // merge non-duplicate titles of otherPublisher into thisPublisher for (TdbTitle otherTitle : otherPublisher.getTdbTitles()) { String otherId = otherTitle.getId(); TdbTitle thisTitle; boolean titleIsNew = true; if (publisherIsNew) { // no need to check for existing title if publisher is new thisTitle = otherTitle.copyForTdbPublisher(thisPublisher); thisPublisher.addTdbTitle(thisTitle); } else { thisTitle = thisPublisher.getTdbTitleById(otherId); titleIsNew = (thisTitle == null); if (titleIsNew) { // copy title if not present in this publisher thisTitle = otherTitle.copyForTdbPublisher(thisPublisher); thisPublisher.addTdbTitle(thisTitle); } else if (! thisTitle.getName().equals(otherTitle.getName())) { // error because it could lead to a missing title -- one probably has a typo // (what about checking other title elements too?) 
logger.error("Ignorning duplicate title entry: \"" + otherTitle.getName() + "\" with the same ID as \"" + thisTitle.getName() + "\""); } } // merge non-duplicate TdbAus of otherTitle into thisTitle for (TdbAu otherAu : otherTitle.getTdbAus()) { // no need to check for existing au if title is new String pluginId = otherAu.getPluginId(); if (titleIsNew || !getTdbAus(pluginId).contains(otherAu)) { // always succeeds we've already checked for duplicate TdbAu thisAu = otherAu.copyForTdbTitle(thisTitle); addTdbAuForPlugin(thisAu); } else { TdbAu thisAu = findExistingTdbAu(otherAu); if (!thisAu.getTdbTitle().getName().equals(otherAu.getTdbTitle().getName())) { if (!thisAu.getName().equals(otherAu.getName())) { logger.error("Ignorning duplicate au entry: \"" + otherAu.getName() + "\" for title \"" + otherAu.getTdbTitle().getName() + "\" with same definion as existing au entry: \"" + thisAu.getName() + "\" for title \"" + thisAu.getTdbTitle().getName() + "\""); } else { logger.error("Ignorning duplicate au entry: \"" + otherAu.getName() + "\" for title \"" + otherAu.getTdbTitle().getName() + "\" with same definion as existing one for title \"" + thisAu.getTdbTitle().getName() + "\""); } } else if (!thisAu.getName().equals(otherAu.getName())) { // error because it could lead to a missing AU -- one probably has a typo logger.error("Ignorning duplicate au entry: \"" + otherAu.getName() + "\" with the same definition as \"" + thisAu.getName() + "\" for title \"" + otherAu.getTdbTitle().getName()); } else { logger.warning("Ignoring duplicate au entry: \"" + otherAu.getName() + "\" for title \"" + otherAu.getTdbTitle().getName()); } } } } } } /** * Find existing TdbAu with same Id as another one. 
* @param otherAu another TdbAu * @return an existing TdbAu already in thisTdb */ protected TdbAu findExistingTdbAu(TdbAu otherAu) { // check for duplicate AU with same plugin for this Tdb Collection<TdbAu> aus = getTdbAus(otherAu.getPluginId()); for (TdbAu au : aus) { if (au.equals(otherAu)) { return au; } } return null; } /** * Returns a collection of TdbAus for the specified plugin ID. * <p> * Note: the returned collection should not be modified. * * @param pluginId the plugin ID * @return a collection of TdbAus for the plugin; <code>null</code> * if no TdbAus for the specified plugin in this configuration. */ public Collection<TdbAu> getTdbAus(String pluginId) { Collection<TdbAu> aus = pluginIdTdbAusMap.get(pluginId); return (aus != null) ? aus : Collections.<TdbAu>emptyList(); } /** * Returns the TdbAus for all plugin IDs. * <p> * Note: the returned map should not be modified. * * @return the TdbAus for all plugin IDs */ public Map<String, Collection<TdbAu>> getAllTdbAus() { return (pluginIdTdbAusMap != null) ? pluginIdTdbAusMap : Collections.<String,Collection<TdbAu>>emptyMap(); } /** * Return the number of TdbAus in this Tdb. * * @return the total TdbAu count */ public int getTdbAuCount() { return tdbAuCount; } /** * Return the number of TdbTitles in this Tdb. * * @return the total TdbTitle count */ public int getTdbTitleCount() { int titleCount = 0; for (TdbPublisher publisher : tdbPublisherMap.values()) { titleCount += publisher.getTdbTitleCount(); } return titleCount; } /** * Return the number of TdbPublishers in this Tdb. * * @return the total TdbPublisher count */ public int getTdbPublisherCount() { return tdbPublisherMap.size(); } /** * Add a new TdbAu from properties. 
This method recognizes * properties of the following form: * <pre> * Properties p = new Properties(); * p.setProperty("title", "Air & Space Volume 1)"); * p.setProperty("journalTitle", "Air and Space"); * p.setProperty("plugin", org.lockss.plugin.smithsonian); * p.setProperty("pluginVersion", "4"); * p.setProperty("issn", "0886-2257"); * p.setProperty("param.1.key", "volume"); * p.setProperty("param.1.value", "1"); * p.setProperty("param.2.key", "year"); * p.setProperty("param.2.value", "2001"); * p.setProperty("param.2.editable", "true"); * p.setProperty("param.3.key", "journal_id"); * p.setProperty("param.3.value", "0886-2257"); * p.setProperty("attributes.publisher", "Smithsonian Institution"); * </pre> * <p> * The "attributes.publisher" property is used to identify the publisher. * If a unique journalID is specified it is used to select among titles * for a publisher. A journalID can be specified indirectly through a * "journal_id" param or an "issn" property. If a journalId is not * specified, the "journalTitle" property is used to select the the title. * <p> * Properties other than "param", "attributes", "title", "journalTitle", * "journalId", and "plugin" are converted to attributes of the AU. Only * "title" and "plugin" are required properties. If "attributes.publisher" * or "journalTitle" are missing, their values are synthesized from the * "title" property. * * @param props a map of title properties * @return the TdbAu that was added * @throws TdbException if this Tdb is sealed, or the * AU already exists in this Tdb */ public TdbAu addTdbAuFromProperties(Properties props) throws TdbException { if (props == null) { throw new IllegalArgumentException("properties cannot be null"); } // verify not sealed if (isSealed()) { throw new TdbException("cannot add au to sealed TDB"); } TdbAu au = newTdbAu(props); addTdbAu(props, au); return au; } /** * Create a new TdbAu instance from the properties. 
* * @param props the properties * @return a TdbAu instance set built from the properties */ private TdbAu newTdbAu(Properties props) { String pluginId = (String)props.get("plugin"); if (pluginId == null) { throw new IllegalArgumentException("TdbAu plugin ID not specified"); } String auName = props.getProperty("title"); if (auName == null) { throw new IllegalArgumentException("TdbAu title not specified"); } // create a new TdbAu and set its elements TdbAu au = new TdbAu(auName, pluginId); // process attrs, and params Map<String, Map<String,String>> paramMap = new HashMap<String, Map<String,String>>(); for (Map.Entry<Object,Object> entry : props.entrySet()) { String key = String.valueOf(entry.getKey()); String value = String.valueOf(entry.getValue()); if (key.startsWith("attributes.")) { // set attributes directly String name = key.substring("attributes.".length()); try { au.setAttr(name, value); } catch (TdbException ex) { logger.warning("Cannot set attribute \"" + name + "\" with value \"" + value + "\" -- ignoring"); } } else if (key.startsWith("param.")) { // skip to param name String param = key.substring("param.".length()); int i; if ( ((i = param.indexOf(".key")) < 0) && ((i = param.indexOf(".value")) < 0)) { logger.warning("Ignoring unexpected param key for au \"" + auName + "\" key: \"" + key + "\" -- ignoring"); } else { // get param map for pname String pname = param.substring(0,i); Map<String,String> pmap = paramMap.get(pname); if (pmap == null) { pmap = new HashMap<String,String>(); paramMap.put(pname, pmap); } // add name and value to param map for pname String name = param.substring(i+1); pmap.put(name, value); } } else if ( !key.equals("title") // TdbAu has "name" property && !key.equals("plugin") // TdbAu has "pluginId" property && !key.equals("journalTitle") // TdbAu has "title" TdbTitle property && !key.startsWith("journal.")) { // TdbAu has "title" TdbTitle property // translate all other properties into AU properties try { 
au.setPropertyByName(key, value); } catch (TdbException ex) { logger.warning("Cannot set property \"" + key + "\" with value \"" + value + "\" -- ignoring"); } } } // set param from accumulated "key", and "value" entries for (Map<String, String> pmap : paramMap.values()) { String name = pmap.get("key"); String value = pmap.get("value"); if (name == null) { logger.warning("Ignoring property with null name"); } else if (value == null) { logger.warning("Ignoring property \"" + name + "\" with null value"); } else { try { au.setParam(name, value); } catch (TdbException ex) { logger.warning("Cannot set param \"" + name + "\" with value \"" + value + "\" -- ignoring"); } } } return au; } /** * Add a TdbAu to a TdbTitle and TdbPubisher, and add links to * the TdbTitle specified by the properties. * * @param props the properties * @param au the TdbAu to add * @throws TdbException if the AU already exists in this Tdb */ private void addTdbAu(Properties props, TdbAu au) throws TdbException { // add au for plugin assuming it is not a duplicate if (!addTdbAuForPlugin(au)) { // au already registered -- report existing au TdbAu existingAu = findExistingTdbAu(au); String titleName = getTdbTitleName(props, au); if (!titleName.equals(existingAu.getTdbTitle().getName())) { throw new TdbException( "Cannot add duplicate au entry: \"" + au.getName() + "\" for title \"" + titleName + "\" with same definition as existing au entry: \"" + existingAu.getName() + "\" for title \"" + existingAu.getTdbTitle().getName() + "\" to title database"); } else if (!existingAu.getName().equals(au.getName())) { // error because it could lead to a missing AU -- one probably has a typo throw new TdbException( "Cannot add duplicate au entry: \"" + au.getName() + "\" with the same definition as \"" + existingAu.getName() + "\" for title \"" + titleName + "\" to title database"); } else { throw new TdbException( "Cannot add duplicate au entry: \"" + au.getName() + "\" for title \"" + titleName + "\" to title 
database"); } } // get or create the TdbTitle for this TdbTitle title = getTdbTitle(props, au); try { // add AU to title title.addTdbAu(au); } catch (TdbException ex) { // if we can't add au to title, remove for plugin and re-throw exception removeTdbAuForPlugin(au); throw ex; } // process title links Map<String, Map<String,String>> linkMap = new HashMap<String, Map<String,String>>(); for (Map.Entry<Object,Object> entry : props.entrySet()) { String key = ""+entry.getKey(); String value = ""+entry.getValue(); if (key.startsWith("journal.link.")) { // skip to link name String param = key.substring("link.".length()); int i; if ( ((i = param.indexOf(".type")) < 0) && ((i = param.indexOf(".journalId")) < 0)) { logger.warning("Ignoring nexpected link key for au \"" + au.getName() + "\" key: \"" + key + "\""); } else { // get link map for linkName String lname = param.substring(0,i); Map<String,String> lmap = linkMap.get(lname); if (lmap == null) { lmap = new HashMap<String,String>(); linkMap.put(lname, lmap); } // add name and value to link map for link String name = param.substring(i+1); lmap.put(name, value); } } } // add links to title from accumulated "type", "journalId" entries for (Map<String, String> lmap : linkMap.values()) { String name = lmap.get("type"); String value = lmap.get("journalId"); if ((name != null) && (value != null)) { try { TdbTitle.LinkType linkType = TdbTitle.LinkType.valueOf(name); title.addLinkToTdbTitleId(linkType, value); } catch (IllegalArgumentException ex) { logger.warning("Ignoring unknown link type for au \"" + au.getName() + "\" name: \"" + name + "\""); } } } } /** * Get title ID from properties and TdbAu. 
   *
   * @param props the properties
   * @param au the TdbAu
   * @return the title ID or <code>null</code> if not found
   */
  private String getTdbTitleId(Properties props, TdbAu au) {
    // get the title ID from one of several props, in priority order:
    // journal.id prop, journal_id param, isbn, eissn, issn
    String titleId = props.getProperty("journal.id");  // proposed new property
    if (titleId == null) {
      // use "journal_id" param as title Id if not already set
      // proposed to replace with "journal.id" property
      titleId = au.getParam("journal_id");
    }
    // use isbn property as title id if not already set
    if (titleId == null) {
      titleId = props.getProperty("isbn");
    }
    // use eissn property as title id if not already set
    if (titleId == null) {
      titleId = props.getProperty("eissn");
    }
    // use issn property as title id if not already set
    if (titleId == null) {
      titleId = props.getProperty("issn");
    }
    return titleId;
  }

  /**
   * Get or create TdbTitle for the specified properties and TdbAu.
   * May create the publisher and/or title as a side effect, logging a
   * warning when names or IDs had to be synthesized.
   *
   * @param props the properties
   * @param au the TdbAu
   * @return the corresponding TdbTitle
   */
  private TdbTitle getTdbTitle(Properties props, TdbAu au) {
    TdbTitle title = null;

    // get publisher name
    String publisherNameFromProps = getTdbPublisherName(props, au);

    // get the title name
    String titleNameFromProps = getTdbTitleName(props, au);

    // get the title ID
    String titleIdFromProps = getTdbTitleId(props, au);

    String titleId = titleIdFromProps;
    if (titleId == null) {
      // generate a titleId if one not specified, using the
      // hash code of the combined title name and publisher names
      // ("id:1"/"id:0" encodes the sign so negated hashes don't collide)
      int hash = (titleNameFromProps + publisherNameFromProps).hashCode();
      titleId = (hash < 0) ? ("id:1" +(-hash)) : ("id:0" + hash);
    }

    // get publisher specified by property name
    TdbPublisher publisher = tdbPublisherMap.get(publisherNameFromProps);
    if (publisher != null) {
      // find title from publisher
      title = publisher.getTdbTitleById(titleId);
      if (title != null) {
        // warn that title name is different
        if (!title.getName().equals(titleNameFromProps)) {
          logger.warning("Title for au \"" + au.getName() + "\": \""
                         + titleNameFromProps
                         + "\" is different than existing title \""
                         + title.getName() + "\" for id " + titleId
                         + " -- using existing title.");
        }
        return title;
      }
    }

    if (publisher == null) {
      // warn of missing publisher name
      if (publisherNameFromProps.startsWith(UNKNOWN_PUBLISHER_PREFIX)) {
        logger.warning("Publisher missing for au \"" + au.getName()
                       + "\" -- using \"" + publisherNameFromProps + "\"");
      }

      // create new publisher for specified publisher name
      publisher = new TdbPublisher(publisherNameFromProps);
      tdbPublisherMap.put(publisherNameFromProps, publisher);
    }

    // warn of missing title name and/or id
    if (titleNameFromProps.startsWith(UNKNOWN_TITLE_PREFIX)) {
      logger.warning("Title missing for au \"" + au.getName()
                     + "\" -- using \"" + titleNameFromProps + "\"");
    }
    if (titleIdFromProps == null) {
      logger.warning("Title ID missing for au \"" + au.getName()
                     + "\" -- using " + titleId);
    }

    // create title and add to publisher
    title = new TdbTitle(titleNameFromProps, titleId);
    try {
      publisher.addTdbTitle(title);
    } catch (TdbException ex) {
      // shouldn't happen: title already exists in publisher
      logger.error(ex.getMessage(), ex);
    }
    return title;
  }

  /**
   * Get publisher name from properties and TdbAu.  Creates a name
   * based on title name if not specified.
   *
   * @param props the properties
   * @param au the TdbAu
   * @return the publisher name
   */
  private String getTdbPublisherName(Properties props, TdbAu au) {
    // use "publisher" attribute if specified, or synthesize from titleName.
    // proposed to replace with publisher.name property
    String publisherName = props.getProperty("attributes.publisher");
    if (publisherName == null) {
      publisherName = props.getProperty("publisher.name");  // proposed new property
    }

    if (publisherName == null) {
      // create publisher name from title name if not specified
      publisherName = UNKNOWN_PUBLISHER_PREFIX + "[" + getTdbTitleName(props, au) + "]";
    }
    return publisherName;
  }

  /**
   * Get the TdbTitle name from the properties.  Fall back to a name
   * derived from the TdbAU name if not specified: known volume/issue/year
   * suffixes are stripped from the AU name.
   *
   * @param props a group of properties
   * @param au the TdbAu
   * @return a TdbTitle name
   */
  private String getTdbTitleName(Properties props, TdbAu au) {
    // use "journalTitle" prop if specified, or synthesize it
    // from auName and one of several properties
    String titleName = props.getProperty("journalTitle");
    if (titleName == null) {
      titleName = props.getProperty("journal.title");  // proposed to replace journalTitle
    }
    if (titleName == null) {
      String issue = au.getParam("issue");
      String year = au.getParam("year");
      String volume = au.getParam("volume");
      if (volume == null) {
        volume = au.getParam("volume_str");
      }
      String auName = au.getName();
      // NOTE(review): the lowercased AU name is compared against raw
      // (non-lowercased) param values; suffixes with uppercase letters
      // would not match -- confirm params are always lowercase/numeric
      String auNameLC = auName.toLowerCase();
      if ((volume != null) && auNameLC.endsWith(" vol " + volume)) {
        titleName = auName.substring(0, auName.length()-" vol ".length() - volume.length());
      } else if ((volume != null) && auNameLC.endsWith(" volume " + volume)) {
        titleName = auName.substring(0, auName.length()-" volume ".length() - volume.length());
      } else if ((issue != null) && auNameLC.endsWith(" issue " + issue)) {
        titleName = auName.substring(0, auName.length()-" issue ".length() - issue.length());
      } else if ((year != null) && auNameLC.endsWith(" " + year)) {
        titleName = auName.substring(0, auName.length()-" ".length() - year.length());
      } else {
        // no recognizable suffix: synthesize a placeholder title name
        titleName = UNKNOWN_TITLE_PREFIX + "[" + auName + "]";
      }
    }
    return titleName;
  }

  /**
   * Get the linked titles for the specified link
type. * * @param linkType the link type {@see TdbTitle} for description of link types * @param title the TdbTitle with links * @return a collection of linked titles for the specified type */ public Collection<TdbTitle> getLinkedTdbTitlesForType(TdbTitle.LinkType linkType, TdbTitle title) { if (linkType == null) { throw new IllegalArgumentException("linkType cannot be null"); } if (title == null) { throw new IllegalArgumentException("title cannot be null"); } Collection<String> titleIds = title.getLinkedTdbTitleIdsForType(linkType); if (titleIds.isEmpty()) { return Collections.emptyList(); } ArrayList<TdbTitle> titles = new ArrayList<TdbTitle>(); for (String titleId : titleIds) { TdbTitle aTitle = getTdbTitleById(titleId); if (aTitle != null) { titles.add(aTitle); } } titles.trimToSize(); return titles; } /** * Get the title for the specified titleId. * * @param titleId the titleID * @return the title for the titleId or <code>null</code. if not found */ public TdbTitle getTdbTitleById(String titleId) { if (titleId == null) { throw new IllegalArgumentException("titleId cannot be null"); } TdbTitle title = null; for (TdbPublisher publisher : tdbPublisherMap.values()) { title = publisher.getTdbTitleById(titleId); if (title != null) { break; } } return title; } /** * Returns a collection of TdbTitles for the specified title name * across all publishers. * * @param titleName the title name * @return a collection of TdbTitles that match the title name */ public Collection<TdbTitle> getTdbTitlesByName(String titleName) { if (titleName == null) { return Collections.emptyList(); } ArrayList<TdbTitle> titles = new ArrayList<TdbTitle>(); for (TdbPublisher publisher : tdbPublisherMap.values()) { titles.addAll(publisher.getTdbTitlesByName(titleName)); } titles.trimToSize(); return titles; } /** * Get the publisher for the specified name. 
* * @param name the publisher name * @return the publisher, or <code>null</code> if not found */ public TdbPublisher getTdbPublisher(String name) { return (tdbPublisherMap != null) ? tdbPublisherMap.get(name) : null; } /** * Returns all TdbPubishers in this configuration. * <p> * Note: The returned map should not be modified. * * @return a map of publisher names to publishers */ public Map<String, TdbPublisher> getAllTdbPublishers() { return (tdbPublisherMap != null) ? tdbPublisherMap : Collections.<String,TdbPublisher>emptyMap(); } /** Print a full description of all elements in the Tdb */ public void prettyPrint(PrintStream ps) { ps.println("Tdb"); TreeMap<String, TdbPublisher> sorted = new TreeMap<String, TdbPublisher>(CatalogueOrderComparator.SINGLETON); sorted.putAll(getAllTdbPublishers()); for (TdbPublisher tdbPublisher : sorted.values()) { tdbPublisher.prettyPrint(ps, 2); } } }
src/org/lockss/config/Tdb.java
/*
 * $Id: Tdb.java,v 1.11 2010-08-15 13:23:40 pgust Exp $
 */

/*

Copyright (c) 2000-2010 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.

*/

package org.lockss.config;

import java.io.*;
import java.util.*;

import org.lockss.util.*;

/**
 * This class represents a title database (TDB).  The TDB consists of
 * hierarchy of <code>TdbPublisher</code>s, and <code>TdbAu</code>s.
 * Special indexing provides fast access to all <code>TdbAu</code>s for
 * a specified plugin ID.
* * @author Philip Gust * @version $Id: Tdb.java,v 1.11 2010-08-15 13:23:40 pgust Exp $ */ public class Tdb { /** * This exception is thrown by Tdb related classes in place of an * unchecked IllegalStateException when an operation cannot be * performed because it is incompatible with state of the Tdb. * <p> * This class inherits from IOException to avoid having higher * level routines that already have to handle IOException when * creating and copying Configuration objects from having to * also handle this exception. * * @author Philip Gust * @version $Id: Tdb.java,v 1.11 2010-08-15 13:23:40 pgust Exp $ */ @SuppressWarnings("serial") static public class TdbException extends Exception { /** * Constructs a new exception with the specified detail message. The * cause is not initialized, and may subsequently be initialized by * a call to {@link #initCause}. * * @param message the detail message. The detail message is saved for * later retrieval by the {@link #getMessage()} method. */ public TdbException(String message) { super(message); } /** * Constructs a new exception with the specified detail message and * cause. <p>Note that the detail message associated with * <code>cause</code> is <i>not</i> automatically incorporated in * this exception's detail message. * * @param message the detail message (which is saved for later retrieval * by the {@link #getMessage()} method). * @param cause the cause (which is saved for later retrieval by the * {@link #getCause()} method). (A <tt>null</tt> value is * permitted, and indicates that the cause is nonexistent or * unknown.) * @since 1.4 */ public TdbException(String message, Throwable cause) { super(message, cause); } /** * Constructs a new exception with the specified cause and a detail * message of <tt>(cause==null ? null : cause.toString())</tt> (which * typically contains the class and detail message of <tt>cause</tt>). 
* This constructor is useful for exceptions that are little more than * wrappers for other throwables (for example, {@link * java.security.PrivilegedActionException}). * * @param cause the cause (which is saved for later retrieval by the * {@link #getCause()} method). (A <tt>null</tt> value is * permitted, and indicates that the cause is nonexistent or * unknown.) * @since 1.4 */ public TdbException(Throwable cause) { super(cause); } } /** * Register the au with this Tdb for its plugin. * * @param au the TdbAu * @return <code>false</code> if already registered, otherwise <code>true</code> */ private boolean addTdbAuForPlugin(TdbAu au) { // add AU to list for plugins String pluginId = au.getPluginId(); Collection<TdbAu> aus = pluginIdTdbAusMap.get(pluginId); if (aus == null) { aus = new HashSet<TdbAu>(); pluginIdTdbAusMap.put(pluginId, aus); } if (!aus.add(au)) { return false; } // increment the total AU count; tdbAuCount++; return true; } /** * Unregister the au with this Tdb for its plugin. * * @param au the TdbAu * @return <code>false</code> if au was not registered, otherwise <code>true</code> */ private boolean removeTdbAuForPlugin(TdbAu au) { // if can't add au to title, we need to undo the au // registration and re-throw the exception we just caught String pluginId = au.getPluginId(); Collection<TdbAu> c = pluginIdTdbAusMap.get(pluginId); if (c.remove(au)) { if (c.isEmpty()) { pluginIdTdbAusMap.remove(c); } tdbAuCount--; return true; } return false; } /** * Add a new TdbAu to this title database. The TdbAu must have * its pluginID, and title set. The TdbAu''s title must also have * its titleId and publisher set. The publisher name must be unique * to all publishers in this Tdb. * * @param au the TdbAu to add. 
* @throws TdbException if Tdb is sealed, this is a duplicate au, or * the au's publisher is a duplicate */ public void addTdbAu(TdbAu au) throws TdbException { if (au == null) { throw new IllegalArgumentException("TdbAu cannot be null"); } // verify not sealed if (isSealed()) { throw new TdbException("Cannot add TdbAu to sealed Tdb"); } // validate title TdbTitle title = au.getTdbTitle(); if (title == null) { throw new IllegalArgumentException("TdbAu's title not set"); } // validate publisher TdbPublisher publisher = title.getTdbPublisher(); if (publisher == null) { throw new IllegalArgumentException("TdbAu's publisher not set"); } // make sure publisher is not a duplicate String pubName = publisher.getName(); TdbPublisher oldPublisher = tdbPublisherMap.put(pubName, publisher); if ((oldPublisher != null) && (oldPublisher != publisher)) { // restore old publisher and report error tdbPublisherMap.put(pubName, oldPublisher); throw new TdbException("New au publisher with duplicate name: " + pubName); } // register the au with this instance if (!addTdbAuForPlugin(au)) { // remove new publisher and report error if (oldPublisher == null) { tdbPublisherMap.remove(pubName); } throw new TdbException("Cannot register au " + au.getName()); } } /** * Set up logger */ protected final static Logger logger = Logger.getLogger("Tdb"); /** * A map of AUs per plugin, for this configuration * (provides faster access for Plugins) */ private final Map<String, Collection<TdbAu>> pluginIdTdbAusMap = new HashMap<String,Collection<TdbAu>>(); /** * Map of publisher names to TdBPublishers for this configuration */ private final Map<String, TdbPublisher> tdbPublisherMap = new HashMap<String,TdbPublisher>(); /** * Determines whether more AUs can be added. 
*/ private boolean isSealed = false; /** * The total number of TdbAus in this TDB (sum of collections in pluginIdTdbAus map */ private int tdbAuCount = 0; /** * Prefix appended to generated unknown title */ private static final String UNKNOWN_TITLE_PREFIX = "Title of "; /** * Prefix appended to generated unknown publisher */ private static final String UNKNOWN_PUBLISHER_PREFIX = "Publisher of "; /** * Seals a Tdb against further additions. */ public void seal() { if (!isSealed) { isSealed = true; // convert map values to array lists to save space because // they will not be modified now that the Tdb is sealed. synchronized(pluginIdTdbAusMap) { for (Map.Entry<String, Collection<TdbAu>> entry : pluginIdTdbAusMap.entrySet()) { ArrayList<TdbAu> list = new ArrayList<TdbAu>(entry.getValue()); list.trimToSize(); entry.setValue(list); } } } } /** * Determines whether this Tdb is sealed. * * @return <code>true</code> if sealed */ public boolean isSealed() { return isSealed; } /** * Determines whether the title database is empty. * * @return <code> true</code> if the title database has no entries */ public boolean isEmpty() { return pluginIdTdbAusMap.isEmpty(); } /** * Returns a collection of pluginIds for TdbAus that are * different from those in this Tdb. * * @param otherTdb a Tdb * @return a collection of pluginIds that are different, * , or all plugin Ids in this Tdb if otherTdb is <code>null</code> */ public Set<String> getPluginIdsForDifferences(Tdb otherTdb) { if (otherTdb == null) { return pluginIdTdbAusMap.keySet(); } if (otherTdb == this) { return Collections.emptySet(); } Set<String> pluginIds = new HashSet<String>(); addPluginIdsForDifferences (pluginIds, otherTdb); return pluginIds; } /** * Adds a collection of pluginIds for TdbAus that are * different from those in this Tdb. 
* * @param pluginIds the set of pluginIds * @param otherTdb a Tdb */ private void addPluginIdsForDifferences(Set<String> pluginIds, Tdb otherTdb) { Map<String, TdbPublisher> tdbPublishers = otherTdb.getAllTdbPublishers(); for (TdbPublisher tdbPublisher : tdbPublishers.values()) { if (!this.tdbPublisherMap.containsKey(tdbPublisher.getName())) { // add pluginIds for publishers in tdb that are not in this Tdb tdbPublisher.addAllPluginIds(pluginIds); } } for (TdbPublisher thisPublisher : tdbPublisherMap.values()) { TdbPublisher tdbPublisher = tdbPublishers.get(thisPublisher.getName()); if (tdbPublisher == null) { // add pluginIds for publisher in this Tdb that is not in tdb thisPublisher.addAllPluginIds(pluginIds); } else { // add pluginIds for publishers in both Tdbs that are different thisPublisher.addPluginIdsForDifferences(pluginIds, tdbPublisher); } } } /** * Determines two Tdbs are equal. Equality is based on having * equal TdbPublishers, and their child TdbTitles and TdbAus. * * @param o the other object * @return <code>true</code> iff they are equal Tdbs */ public boolean equals(Object o) { // check for identity if (this == o) { return true; } if (o instanceof Tdb) { try { // if no exception thrown, there are no differences // because the method did not try to modify the set addPluginIdsForDifferences(Collections.<String>emptySet(), (Tdb)o); return true; } catch (UnsupportedOperationException ex) { // differences because method tried to add to unmodifiable set } catch (IllegalArgumentException ex) { // if something was wrong with the other Tdb } catch (IllegalStateException ex) { // if something is wrong with this Tdb } } return false; } /** * Not supported for this class. * * @throws UnsupportedOperationException */ public int hashCode() { throw new UnsupportedOperationException(); } /** * Merge other Tdb into this one. Makes copies of otherTdb's non-duplicate * TdbPublisher, TdbTitle, and TdbAu objects and their non-duplicate children. 
* The object themselves are not merged. * * @param otherTdb the other Tdb * @throws TdbException if Tdb is sealed */ public void copyFrom(Tdb otherTdb) throws TdbException { // ignore inappropriate Tdb values if ((otherTdb == null) || (otherTdb == this)) { return; } if (isSealed()) { throw new TdbException("Cannot add otherTdb AUs to sealed Tdb"); } // merge non-duplicate publishers of otherTdb boolean tdbIsNew = tdbPublisherMap.isEmpty(); for (TdbPublisher otherPublisher : otherTdb.getAllTdbPublishers().values()) { String pubName = otherPublisher.getName(); TdbPublisher thisPublisher; boolean publisherIsNew = true; if (tdbIsNew) { // no need to check for existing publisher if TDB is new thisPublisher = new TdbPublisher(pubName); tdbPublisherMap.put(pubName, thisPublisher); } else { thisPublisher = tdbPublisherMap.get(pubName); publisherIsNew = (thisPublisher == null); if (publisherIsNew) { // copy publisher if not present in this Tdb thisPublisher = new TdbPublisher(pubName); tdbPublisherMap.put(pubName, thisPublisher); } } // merge non-duplicate titles of otherPublisher into thisPublisher for (TdbTitle otherTitle : otherPublisher.getTdbTitles()) { String otherId = otherTitle.getId(); TdbTitle thisTitle; boolean titleIsNew = true; if (publisherIsNew) { // no need to check for existing title if publisher is new thisTitle = otherTitle.copyForTdbPublisher(thisPublisher); thisPublisher.addTdbTitle(thisTitle); } else { thisTitle = thisPublisher.getTdbTitleById(otherId); titleIsNew = (thisTitle == null); if (titleIsNew) { // copy title if not present in this publisher thisTitle = otherTitle.copyForTdbPublisher(thisPublisher); thisPublisher.addTdbTitle(thisTitle); } else if (! thisTitle.getName().equals(otherTitle.getName())) { // error because it could lead to a missing title -- one probably has a typo // (what about checking other title elements too?) 
logger.error("Ignorning duplicate title entry: \"" + otherTitle.getName() + "\" with the same ID as \"" + thisTitle.getName() + "\""); } } // merge non-duplicate TdbAus of otherTitle into thisTitle for (TdbAu otherAu : otherTitle.getTdbAus()) { // no need to check for existing au if title is new String pluginId = otherAu.getPluginId(); if (titleIsNew || !getTdbAus(pluginId).contains(otherAu)) { // always succeeds we've already checked for duplicate TdbAu thisAu = otherAu.copyForTdbTitle(thisTitle); addTdbAuForPlugin(thisAu); } else { TdbAu thisAu = findExistingTdbAu(otherAu); if (!thisAu.getTdbTitle().getName().equals(otherAu.getTdbTitle().getName())) { if (!thisAu.getName().equals(otherAu.getName())) { logger.error("Ignorning duplicate au entry: \"" + otherAu.getName() + "\" for title \"" + otherAu.getTdbTitle().getName() + "\" with same definion as existing au entry: \"" + thisAu.getName() + "\" for title \"" + thisAu.getTdbTitle().getName() + "\""); } else { logger.error("Ignorning duplicate au entry: \"" + otherAu.getName() + "\" for title \"" + otherAu.getTdbTitle().getName() + "\" with same definion as existing one for title \"" + thisAu.getTdbTitle().getName() + "\""); } } else if (!thisAu.getName().equals(otherAu.getName())) { // error because it could lead to a missing AU -- one probably has a typo logger.error("Ignorning duplicate au entry: \"" + otherAu.getName() + "\" with the same definition as \"" + thisAu.getName() + "\" for title \"" + otherAu.getTdbTitle().getName()); } else { logger.warning("Ignoring duplicate au entry: \"" + otherAu.getName() + "\" for title \"" + otherAu.getTdbTitle().getName()); } } } } } } /** * Find existing TdbAu with same Id as another one. 
* @param otherAu another TdbAu * @return an existing TdbAu already in thisTdb */ protected TdbAu findExistingTdbAu(TdbAu otherAu) { // check for duplicate AU with same plugin for this Tdb Collection<TdbAu> aus = getTdbAus(otherAu.getPluginId()); for (TdbAu au : aus) { if (au.equals(otherAu)) { return au; } } return null; } /** * Returns a collection of TdbAus for the specified plugin ID. * <p> * Note: the returned collection should not be modified. * * @param pluginId the plugin ID * @return a collection of TdbAus for the plugin; <code>null</code> * if no TdbAus for the specified plugin in this configuration. */ public Collection<TdbAu> getTdbAus(String pluginId) { Collection<TdbAu> aus = pluginIdTdbAusMap.get(pluginId); return (aus != null) ? aus : Collections.<TdbAu>emptyList(); } /** * Returns the TdbAus for all plugin IDs. * <p> * Note: the returned map should not be modified. * * @return the TdbAus for all plugin IDs */ public Map<String, Collection<TdbAu>> getAllTdbAus() { return (pluginIdTdbAusMap != null) ? pluginIdTdbAusMap : Collections.<String,Collection<TdbAu>>emptyMap(); } /** * Return the number of TdbAus in this Tdb. * * @return the total TdbAu count */ public int getTdbAuCount() { return tdbAuCount; } /** * Return the number of TdbTitles in this Tdb. * * @return the total TdbTitle count */ public int getTdbTitleCount() { int titleCount = 0; for (TdbPublisher publisher : tdbPublisherMap.values()) { titleCount += publisher.getTdbTitleCount(); } return titleCount; } /** * Return the number of TdbPublishers in this Tdb. * * @return the total TdbPublisher count */ public int getTdbPublisherCount() { return tdbPublisherMap.size(); } /** * Add a new TdbAu from properties. 
This method recognizes * properties of the following form: * <pre> * Properties p = new Properties(); * p.setProperty("title", "Air & Space Volume 1)"); * p.setProperty("journalTitle", "Air and Space"); * p.setProperty("plugin", org.lockss.plugin.smithsonian); * p.setProperty("pluginVersion", "4"); * p.setProperty("issn", "0886-2257"); * p.setProperty("param.1.key", "volume"); * p.setProperty("param.1.value", "1"); * p.setProperty("param.2.key", "year"); * p.setProperty("param.2.value", "2001"); * p.setProperty("param.2.editable", "true"); * p.setProperty("param.3.key", "journal_id"); * p.setProperty("param.3.value", "0886-2257"); * p.setProperty("attributes.publisher", "Smithsonian Institution"); * </pre> * <p> * The "attributes.publisher" property is used to identify the publisher. * If a unique journalID is specified it is used to select among titles * for a publisher. A journalID can be specified indirectly through a * "journal_id" param or an "issn" property. If a journalId is not * specified, the "journalTitle" property is used to select the the title. * <p> * Properties other than "param", "attributes", "title", "journalTitle", * "journalId", and "plugin" are converted to attributes of the AU. Only * "title" and "plugin" are required properties. If "attributes.publisher" * or "journalTitle" are missing, their values are synthesized from the * "title" property. * * @param props a map of title properties * @return the TdbAu that was added * @throws TdbException if this Tdb is sealed, or the * AU already exists in this Tdb */ public TdbAu addTdbAuFromProperties(Properties props) throws TdbException { if (props == null) { throw new IllegalArgumentException("properties cannot be null"); } // verify not sealed if (isSealed()) { throw new TdbException("cannot add au to sealed TDB"); } TdbAu au = newTdbAu(props); addTdbAu(props, au); return au; } /** * Create a new TdbAu instance from the properties. 
* * @param props the properties * @return a TdbAu instance set built from the properties */ private TdbAu newTdbAu(Properties props) { String pluginId = (String)props.get("plugin"); if (pluginId == null) { throw new IllegalArgumentException("TdbAu plugin ID not specified"); } String auName = props.getProperty("title"); if (auName == null) { throw new IllegalArgumentException("TdbAu title not specified"); } // create a new TdbAu and set its elements TdbAu au = new TdbAu(auName, pluginId); // process attrs, and params Map<String, Map<String,String>> paramMap = new HashMap<String, Map<String,String>>(); for (Map.Entry<Object,Object> entry : props.entrySet()) { String key = String.valueOf(entry.getKey()); String value = String.valueOf(entry.getValue()); if (key.startsWith("attributes.")) { // set attributes directly String name = key.substring("attributes.".length()); try { au.setAttr(name, value); } catch (TdbException ex) { logger.warning("Cannot set attribute \"" + name + "\" with value \"" + value + "\" -- ignoring"); } } else if (key.startsWith("param.")) { // skip to param name String param = key.substring("param.".length()); int i; if ( ((i = param.indexOf(".key")) < 0) && ((i = param.indexOf(".value")) < 0)) { logger.warning("Ignoring unexpected param key for au \"" + auName + "\" key: \"" + key + "\" -- ignoring"); } else { // get param map for pname String pname = param.substring(0,i); Map<String,String> pmap = paramMap.get(pname); if (pmap == null) { pmap = new HashMap<String,String>(); paramMap.put(pname, pmap); } // add name and value to param map for pname String name = param.substring(i+1); pmap.put(name, value); } } else if ( !key.equals("title") // TdbAu has "name" property && !key.equals("plugin") // TdbAu has "pluginId" property && !key.equals("journalTitle") // TdbAu has "title" TdbTitle property && !key.startsWith("journal.")) { // TdbAu has "title" TdbTitle property // translate all other properties into AU properties try { 
au.setPropertyByName(key, value); } catch (TdbException ex) { logger.warning("Cannot set property \"" + key + "\" with value \"" + value + "\" -- ignoring"); } } } // set param from accumulated "key", and "value" entries for (Map<String, String> pmap : paramMap.values()) { String name = pmap.get("key"); String value = pmap.get("value"); if (name == null) { logger.warning("Ignoring property with null name"); } else if (value == null) { logger.warning("Ignoring property \"" + name + "\" with null value"); } else { try { au.setParam(name, value); } catch (TdbException ex) { logger.warning("Cannot set param \"" + name + "\" with value \"" + value + "\" -- ignoring"); } } } return au; } /** * Add a TdbAu to a TdbTitle and TdbPubisher, and add links to * the TdbTitle specified by the properties. * * @param props the properties * @param au the TdbAu to add * @throws TdbException if the AU already exists in this Tdb */ private void addTdbAu(Properties props, TdbAu au) throws TdbException { // add au for plugin assuming it is not a duplicate if (!addTdbAuForPlugin(au)) { // au already registered -- report existing au TdbAu existingAu = findExistingTdbAu(au); String titleName = getTdbTitleName(props, au); if (!titleName.equals(existingAu.getTdbTitle().getName())) { throw new TdbException( "Cannot add duplicate au entry: \"" + au.getName() + "\" for title \"" + titleName + "\" with same definition as existing au entry: \"" + existingAu.getName() + "\" for title \"" + existingAu.getTdbTitle().getName() + "\" to title database"); } else if (!existingAu.getName().equals(au.getName())) { // error because it could lead to a missing AU -- one probably has a typo throw new TdbException( "Cannot add duplicate au entry: \"" + au.getName() + "\" with the same definition as \"" + existingAu.getName() + "\" for title \"" + titleName + "\" to title database"); } else { throw new TdbException( "Cannot add duplicate au entry: \"" + au.getName() + "\" for title \"" + titleName + "\" to title 
database"); } } // get or create the TdbTitle for this TdbTitle title = getTdbTitle(props, au); try { // add AU to title title.addTdbAu(au); } catch (TdbException ex) { // if we can't add au to title, remove for plugin and re-throw exception removeTdbAuForPlugin(au); throw ex; } // process title links Map<String, Map<String,String>> linkMap = new HashMap<String, Map<String,String>>(); for (Map.Entry<Object,Object> entry : props.entrySet()) { String key = ""+entry.getKey(); String value = ""+entry.getValue(); if (key.startsWith("journal.link.")) { // skip to link name String param = key.substring("link.".length()); int i; if ( ((i = param.indexOf(".type")) < 0) && ((i = param.indexOf(".journalId")) < 0)) { logger.warning("Ignoring nexpected link key for au \"" + au.getName() + "\" key: \"" + key + "\""); } else { // get link map for linkName String lname = param.substring(0,i); Map<String,String> lmap = linkMap.get(lname); if (lmap == null) { lmap = new HashMap<String,String>(); linkMap.put(lname, lmap); } // add name and value to link map for link String name = param.substring(i+1); lmap.put(name, value); } } } // add links to title from accumulated "type", "journalId" entries for (Map<String, String> lmap : linkMap.values()) { String name = lmap.get("type"); String value = lmap.get("journalId"); if ((name != null) && (value != null)) { try { TdbTitle.LinkType linkType = TdbTitle.LinkType.valueOf(name); title.addLinkToTdbTitleId(linkType, value); } catch (IllegalArgumentException ex) { logger.warning("Ignoring unknown link type for au \"" + au.getName() + "\" name: \"" + name + "\""); } } } } /** * Get title ID from properties and TdbAu. 
* * @param props the properties * @param au the TdbAu * @return the title ID or <code>null</code> if not found */ private String getTdbTitleId(Properties props, TdbAu au) { // get the title ID from one of several props String titleId = props.getProperty("journal.id"); // proposed new property if (titleId == null) { // use "journal_id" param as title Id if not already set // proposed to replace with "journal.id" property titleId = au.getParam("journal_id"); } // use isbn property as title id if not already set if (titleId == null) { titleId = props.getProperty("isbn"); } // use eissn property as title id if not already set if (titleId == null) { titleId = props.getProperty("eissn"); } // use issn property as title id if not already set if (titleId == null) { titleId = props.getProperty("issn"); } return titleId; } /** * Get or create TdbTitle for the specified properties and TdbAu. * * @param props the properties * @param au the TdbAu * @return the corresponding TdbTitle */ private TdbTitle getTdbTitle(Properties props, TdbAu au) { TdbTitle title = null; // get publisher name String publisherNameFromProps = getTdbPublisherName(props, au); // get the title name String titleNameFromProps = getTdbTitleName(props, au); // get the title ID String titleIdFromProps = getTdbTitleId(props, au); String titleId = titleIdFromProps; if (titleId == null) { // generate a titleId if one not specified, using the // hash code of the combined title name and publisher names int hash = (titleNameFromProps + publisherNameFromProps).hashCode(); titleId = (hash < 0) ? 
("id:1" +(-hash)) : ("id:0" + hash); } // get publisher specified by property name TdbPublisher publisher = tdbPublisherMap.get(publisherNameFromProps); if (publisher != null) { // find title from publisher title = publisher.getTdbTitleById(titleId); if (title != null) { // warn that title name is different if (!title.getName().equals(titleNameFromProps)) { logger.warning("Title for au \"" + au.getName() + "\": \"" + titleNameFromProps + "\" is different than existing title \"" + title.getName() + "\" for id " + titleId + " -- using existing title."); } return title; } } if (publisher == null) { // warn of missing publisher name if (publisherNameFromProps.startsWith(UNKNOWN_PUBLISHER_PREFIX)) { logger.warning("Publisher missing for au \"" + au.getName() + "\" -- using \"" + publisherNameFromProps + "\""); } // create new publisher for specified publisher name publisher = new TdbPublisher(publisherNameFromProps); tdbPublisherMap.put(publisherNameFromProps, publisher); } // warn of missing title name and/or id if (titleNameFromProps.startsWith(UNKNOWN_TITLE_PREFIX)) { logger.warning("Title missing for au \"" + au.getName() + "\" -- using \"" + titleNameFromProps + "\""); } if (titleIdFromProps == null) { logger.warning("Title ID missing for au \"" + au.getName() + "\" -- using " + titleId); } // create title and add to publisher title = new TdbTitle(titleNameFromProps, titleId); try { publisher.addTdbTitle(title); } catch (TdbException ex) { // shouldn't happen: title already exists in publisher logger.error(ex.getMessage(), ex); } return title; } /** * Get publisher name from properties and TdbAu. Creates a name * based on title name if not specified. * * @param props the properties * @param au the TdbAu * @return the publisher name */ private String getTdbPublisherName(Properties props, TdbAu au) { // use "publisher" attribute if specified, or synthesize from titleName. 
// proposed to replace with publisher.name property String publisherName = props.getProperty("attributes.publisher"); if (publisherName == null) { publisherName = props.getProperty("publisher.name"); // proposed new property } if (publisherName == null) { // create publisher name from title name if not specified String titleName = getTdbTitleName(props, au); publisherName = UNKNOWN_PUBLISHER_PREFIX + "[" + titleName + "]"; } return publisherName; } /** * Get the TdbTitle name from the properties. Fall back to a name * derived from the TdbAU name if not specified. * * @param props a group of properties * @param au the TdbAu * @return a TdbTitle name */ private String getTdbTitleName(Properties props, TdbAu au) { // use "journalTitle" prop if specified, or synthesize it // from auName and one of several properties String titleName = props.getProperty("journalTitle"); if (titleName == null) { titleName = props.getProperty("journal.title"); // proposed to replace journalTitle } if (titleName == null) { String issue = au.getParam("issue"); String year = au.getParam("year"); String volume = au.getParam("volume"); if (volume == null) { volume = au.getParam("volume_str"); } String auName = au.getName(); String auNameLC = auName.toLowerCase(); if ((volume != null) && auNameLC.endsWith(" vol " + volume)) { titleName = auName.substring(0, auName.length()-" vol ".length() - volume.length()); } else if ((volume != null) && auNameLC.endsWith(" volume " + volume)) { titleName = auName.substring(0, auName.length()-" volume ".length() - volume.length()); } else if ((issue != null) && auNameLC.endsWith(" issue " + issue)) { titleName = auName.substring(0, auName.length()-" issue ".length() - issue.length()); } else if ((year != null) && auNameLC.endsWith(" " + year)) { titleName = auName.substring(0, auName.length()-" ".length() - year.length()); } else { titleName = UNKNOWN_TITLE_PREFIX + "[" + auName + "]"; } } return titleName; } /** * Get the linked titles for the specified link 
type. * * @param linkType the link type {@see TdbTitle} for description of link types * @param title the TdbTitle with links * @return a collection of linked titles for the specified type */ public Collection<TdbTitle> getLinkedTdbTitlesForType(TdbTitle.LinkType linkType, TdbTitle title) { if (linkType == null) { throw new IllegalArgumentException("linkType cannot be null"); } if (title == null) { throw new IllegalArgumentException("title cannot be null"); } Collection<String> titleIds = title.getLinkedTdbTitleIdsForType(linkType); if (titleIds.isEmpty()) { return Collections.emptyList(); } ArrayList<TdbTitle> titles = new ArrayList<TdbTitle>(); for (String titleId : titleIds) { TdbTitle aTitle = getTdbTitleById(titleId); if (aTitle != null) { titles.add(aTitle); } } titles.trimToSize(); return titles; } /** * Get the title for the specified titleId. * * @param titleId the titleID * @return the title for the titleId or <code>null</code. if not found */ public TdbTitle getTdbTitleById(String titleId) { if (titleId == null) { throw new IllegalArgumentException("titleId cannot be null"); } TdbTitle title = null; for (TdbPublisher publisher : tdbPublisherMap.values()) { title = publisher.getTdbTitleById(titleId); if (title != null) { break; } } return title; } /** * Returns a collection of TdbTitles for the specified title name * across all publishers. * * @param titleName the title name * @return a collection of TdbTitles that match the title name */ public Collection<TdbTitle> getTdbTitlesByName(String titleName) { if (titleName == null) { return Collections.emptyList(); } ArrayList<TdbTitle> titles = new ArrayList<TdbTitle>(); for (TdbPublisher publisher : tdbPublisherMap.values()) { titles.addAll(publisher.getTdbTitlesByName(titleName)); } titles.trimToSize(); return titles; } /** * Get the publisher for the specified name. 
* * @param name the publisher name * @return the publisher, or <code>null</code> if not found */ public TdbPublisher getTdbPublisher(String name) { return (tdbPublisherMap != null) ? tdbPublisherMap.get(name) : null; } /** * Returns all TdbPubishers in this configuration. * <p> * Note: The returned map should not be modified. * * @return a map of publisher names to publishers */ public Map<String, TdbPublisher> getAllTdbPublishers() { return (tdbPublisherMap != null) ? tdbPublisherMap : Collections.<String,TdbPublisher>emptyMap(); } /** Print a full description of all elements in the Tdb */ public void prettyPrint(PrintStream ps) { ps.println("Tdb"); TreeMap<String, TdbPublisher> sorted = new TreeMap<String, TdbPublisher>(CatalogueOrderComparator.SINGLETON); sorted.putAll(getAllTdbPublishers()); for (TdbPublisher tdbPublisher : sorted.values()) { tdbPublisher.prettyPrint(ps, 2); } } }
Update to refresh headers. git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@10522 4f837ed2-42f5-46e7-a7a5-fa17313484d4
src/org/lockss/config/Tdb.java
Update to refresh headers.
<ide><path>rc/org/lockss/config/Tdb.java <ide> /* <del> * $Id: Tdb.java,v 1.11 2010-08-15 13:23:40 pgust Exp $ <add> * $Id: Tdb.java,v 1.12 2011-01-07 19:53:06 pgust Exp $ <ide> */ <ide> <ide> /* <ide> * a specified plugin ID. <ide> * <ide> * @author Philip Gust <del> * @version $Id: Tdb.java,v 1.11 2010-08-15 13:23:40 pgust Exp $ <add> * @version $Id: Tdb.java,v 1.12 2011-01-07 19:53:06 pgust Exp $ <ide> */ <ide> public class Tdb { <ide> /** <ide> * also handle this exception. <ide> * <ide> * @author Philip Gust <del> * @version $Id: Tdb.java,v 1.11 2010-08-15 13:23:40 pgust Exp $ <add> * @version $Id: Tdb.java,v 1.12 2011-01-07 19:53:06 pgust Exp $ <ide> */ <ide> @SuppressWarnings("serial") <ide> static public class TdbException extends Exception {
Java
apache-2.0
ab55763937e6af2d67eb910f4973008de252c4bd
0
smartnews/presto,smartnews/presto,ebyhr/presto,dain/presto,ebyhr/presto,dain/presto,losipiuk/presto,smartnews/presto,dain/presto,losipiuk/presto,ebyhr/presto,ebyhr/presto,dain/presto,Praveen2112/presto,Praveen2112/presto,losipiuk/presto,losipiuk/presto,ebyhr/presto,losipiuk/presto,dain/presto,smartnews/presto,Praveen2112/presto,Praveen2112/presto,Praveen2112/presto,smartnews/presto
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.plugin.iceberg; import com.google.common.base.VerifyException; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.trino.Session; import io.trino.metadata.Metadata; import io.trino.metadata.QualifiedObjectName; import io.trino.metadata.TableHandle; import io.trino.plugin.hive.HdfsEnvironment; import io.trino.spi.connector.ColumnHandle; import io.trino.spi.connector.Constraint; import io.trino.spi.connector.ConstraintApplicationResult; import io.trino.spi.connector.TableNotFoundException; import io.trino.spi.predicate.Domain; import io.trino.spi.predicate.NullableValue; import io.trino.spi.predicate.TupleDomain; import io.trino.spi.statistics.ColumnStatistics; import io.trino.spi.statistics.TableStatistics; import io.trino.testing.BaseConnectorTest; import io.trino.testing.MaterializedResult; import io.trino.testing.MaterializedRow; import io.trino.testing.QueryRunner; import io.trino.testing.TestingConnectorBehavior; import io.trino.testing.sql.TestTable; import io.trino.transaction.TransactionBuilder; import org.apache.avro.Schema; import org.apache.avro.file.DataFileReader; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericDatumWriter; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import 
org.apache.iceberg.FileFormat; import org.intellij.lang.annotations.Language; import org.testng.SkipException; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; import java.io.OutputStream; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.IntStream; import java.util.stream.LongStream; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.google.common.collect.Iterables.getOnlyElement; import static io.trino.plugin.hive.HdfsEnvironment.HdfsContext; import static io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT; import static io.trino.plugin.iceberg.IcebergQueryRunner.createIcebergQueryRunner; import static io.trino.plugin.iceberg.IcebergSplitManager.ICEBERG_DOMAIN_COMPACTION_THRESHOLD; import static io.trino.spi.predicate.Domain.multipleValues; import static io.trino.spi.predicate.Domain.singleValue; import static io.trino.spi.type.BigintType.BIGINT; import static io.trino.spi.type.DoubleType.DOUBLE; import static io.trino.spi.type.VarcharType.VARCHAR; import static io.trino.testing.MaterializedResult.resultBuilder; import static io.trino.testing.assertions.Assert.assertEquals; import static io.trino.transaction.TransactionBuilder.transaction; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.joining; import static java.util.stream.IntStream.range; import static org.apache.iceberg.FileFormat.ORC; import static org.apache.iceberg.FileFormat.PARQUET; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static 
org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;

/**
 * Base test suite for the Iceberg connector, parameterized by the Iceberg file
 * format under test. Concrete subclasses supply the format (e.g. ORC or PARQUET).
 */
public abstract class BaseIcebergConnectorTest
        extends BaseConnectorTest
{
    // Extracts the trailing "WITH (...)" properties clause from a SHOW CREATE TABLE statement
    private static final Pattern WITH_CLAUSE_EXTRACTER = Pattern.compile(".*(WITH\\s*\\([^)]*\\))\\s*$", Pattern.DOTALL);

    // File format this test instance exercises; also appears in expected SHOW CREATE output
    private final FileFormat format;

    protected BaseIcebergConnectorTest(FileFormat format)
    {
        this.format = requireNonNull(format, "format is null");
    }

    @Override
    protected QueryRunner createQueryRunner()
            throws Exception
    {
        return createIcebergQueryRunner(ImmutableMap.of(), format, REQUIRED_TPCH_TABLES);
    }

    // Declares which optional connector behaviors Iceberg supports so the shared
    // BaseConnectorTest suite can enable/skip the corresponding generic tests.
    @Override
    protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
    {
        switch (connectorBehavior) {
            case SUPPORTS_COMMENT_ON_COLUMN:
            case SUPPORTS_TOPN_PUSHDOWN:
                return false;
            case SUPPORTS_CREATE_VIEW:
                return true;
            case SUPPORTS_CREATE_MATERIALIZED_VIEW:
                return true;
            case SUPPORTS_DELETE:
                return true;
            default:
                return super.hasBehavior(connectorBehavior);
        }
    }

    // Row-level DELETE is unsupported; the generic test is expected to fail with
    // the connector's "partition delete only" error.
    @Test
    @Override
    public void testDelete()
    {
        // Deletes are covered with testMetadataDelete test methods
        assertThatThrownBy(super::testDelete)
                .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely");
    }

    @Override
    public void testDeleteWithComplexPredicate()
    {
        // Deletes are covered with testMetadataDelete test methods
        assertThatThrownBy(super::testDeleteWithComplexPredicate)
                .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely");
    }

    @Override
    public void testDeleteWithSemiJoin()
    {
        // Deletes are covered with testMetadataDelete test methods
        assertThatThrownBy(super::testDeleteWithSemiJoin)
                .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely");
    }

    @Override
    public void testDeleteWithSubquery()
    {
        // Deletes are covered with testMetadataDelete test methods
        assertThatThrownBy(super::testDeleteWithSubquery)
                .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely");
    }

    @Override
    public void testDeleteWithVarcharPredicate()
    {
        // Deletes are covered with testMetadataDelete test methods
        assertThatThrownBy(super::testDeleteWithVarcharPredicate)
                .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely");
    }

    @Override
    public void testRowLevelDelete()
    {
        // Deletes are covered with testMetadataDelete test methods
        assertThatThrownBy(super::testRowLevelDelete)
                .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely");
    }

    // Verifies SHOW CREATE SCHEMA output, including the warehouse location property.
    @Test
    @Override
    public void testShowCreateSchema()
    {
        assertThat(computeActual("SHOW CREATE SCHEMA tpch").getOnlyValue().toString())
                .matches("CREATE SCHEMA iceberg.tpch\n" +
                        "AUTHORIZATION USER user\n" +
                        "WITH \\(\n" +
                        "\\s+location = '.*/iceberg_data/tpch'\n" +
                        "\\)");
    }

    // Checks the column metadata reported by DESCRIBE for the tpch orders table.
    @Override
    @Test
    public void testDescribeTable()
    {
        MaterializedResult expectedColumns = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
                .row("orderkey", "bigint", "", "")
                .row("custkey", "bigint", "", "")
                .row("orderstatus", "varchar", "", "")
                .row("totalprice", "double", "", "")
                .row("orderdate", "date", "", "")
                .row("orderpriority", "varchar", "", "")
                .row("clerk", "varchar", "", "")
                .row("shippriority", "integer", "", "")
                .row("comment", "varchar", "", "")
                .build();
        MaterializedResult actualColumns = computeActual("DESCRIBE orders");
        assertEquals(actualColumns, expectedColumns);
    }

    // SHOW CREATE TABLE must reflect the format this test instance runs with.
    @Override
    @Test
    public void testShowCreateTable()
    {
        assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue())
                .isEqualTo("CREATE TABLE iceberg.tpch.orders (\n" +
                        " orderkey bigint,\n" +
                        " custkey bigint,\n" +
                        " orderstatus varchar,\n" +
                        " totalprice double,\n" +
                        " orderdate date,\n" +
                        " orderpriority varchar,\n" +
                        " clerk varchar,\n" +
                        " shippriority integer,\n" +
                        " comment varchar\n" +
                        ")\n" +
                        "WITH (\n" +
                        " format = '" + format.name() + "'\n" +
                        ")");
    }

    @Override
    protected void checkInformationSchemaViewsForMaterializedView(String schemaName, String viewName)
    {
        // TODO should probably return materialized view, as it's also a view -- to be double checked
        assertThatThrownBy(() -> super.checkInformationSchemaViewsForMaterializedView(schemaName, viewName))
                .hasMessageFindingMatch("(?s)Expecting.*to contain:.*\\Q[(" + viewName + ")]");
    }

    // Exercises DECIMAL round-trips at representative precision/scale combinations
    // (short and long decimal encodings, boundary scales).
    // NOTE(review): (18, 17) and (38, 37) are each invoked twice — likely unintended duplication.
    @Test
    public void testDecimal()
    {
        testDecimalWithPrecisionAndScale(1, 0);
        testDecimalWithPrecisionAndScale(8, 6);
        testDecimalWithPrecisionAndScale(9, 8);
        testDecimalWithPrecisionAndScale(10, 8);
        testDecimalWithPrecisionAndScale(18, 1);
        testDecimalWithPrecisionAndScale(18, 8);
        testDecimalWithPrecisionAndScale(18, 17);
        testDecimalWithPrecisionAndScale(17, 16);
        testDecimalWithPrecisionAndScale(18, 17);
        testDecimalWithPrecisionAndScale(24, 10);
        testDecimalWithPrecisionAndScale(30, 10);
        testDecimalWithPrecisionAndScale(37, 26);
        testDecimalWithPrecisionAndScale(38, 37);
        testDecimalWithPrecisionAndScale(38, 17);
        testDecimalWithPrecisionAndScale(38, 37);
    }

    // Inserts one maximal-width decimal value for the given precision/scale and
    // verifies it reads back unchanged.
    private void testDecimalWithPrecisionAndScale(int precision, int scale)
    {
        checkArgument(precision >= 1 && precision <= 38, "Decimal precision (%s) must be between 1 and 38 inclusive", precision);
        checkArgument(scale < precision && scale >= 0, "Decimal scale (%s) must be less than the precision (%s) and non-negative", scale, precision);

        String decimalType = format("DECIMAL(%d,%d)", precision, scale);
        // Build a value that uses every available digit before and after the point
        String beforeTheDecimalPoint = "12345678901234567890123456789012345678".substring(0, precision - scale);
        String afterTheDecimalPoint = "09876543210987654321098765432109876543".substring(0, scale);
        String decimalValue = format("%s.%s", beforeTheDecimalPoint, afterTheDecimalPoint);

        assertUpdate(format("CREATE TABLE test_iceberg_decimal (x %s)", decimalType));
        assertUpdate(format("INSERT INTO test_iceberg_decimal (x) VALUES (CAST('%s' AS %s))", decimalValue,
decimalType), 1);
        assertQuery("SELECT * FROM test_iceberg_decimal", format("SELECT CAST('%s' AS %s)", decimalValue, decimalType));
        dropTable("test_iceberg_decimal");
    }

    @Test
    public void testTime()
    {
        testSelectOrPartitionedByTime(false);
    }

    @Test
    public void testPartitionedByTime()
    {
        testSelectOrPartitionedByTime(true);
    }

    // Shared body for TIME(6) tests: inserts two rows and checks filtering on
    // both the TIME column and the companion BIGINT, with optional partitioning on x.
    private void testSelectOrPartitionedByTime(boolean partitioned)
    {
        String tableName = format("test_%s_by_time", partitioned ? "partitioned" : "selected");
        String partitioning = partitioned ? "WITH(partitioning = ARRAY['x'])" : "";
        assertUpdate(format("CREATE TABLE %s (x TIME(6), y BIGINT) %s", tableName, partitioning));
        assertUpdate(format("INSERT INTO %s VALUES (TIME '10:12:34', 12345)", tableName), 1);
        assertQuery(format("SELECT COUNT(*) FROM %s", tableName), "SELECT 1");
        assertQuery(format("SELECT x FROM %s", tableName), "SELECT CAST('10:12:34' AS TIME)");
        assertUpdate(format("INSERT INTO %s VALUES (TIME '9:00:00', 67890)", tableName), 1);
        assertQuery(format("SELECT COUNT(*) FROM %s", tableName), "SELECT 2");
        assertQuery(format("SELECT x FROM %s WHERE x = TIME '10:12:34'", tableName), "SELECT CAST('10:12:34' AS TIME)");
        assertQuery(format("SELECT x FROM %s WHERE x = TIME '9:00:00'", tableName), "SELECT CAST('9:00:00' AS TIME)");
        assertQuery(format("SELECT x FROM %s WHERE y = 12345", tableName), "SELECT CAST('10:12:34' AS TIME)");
        assertQuery(format("SELECT x FROM %s WHERE y = 67890", tableName), "SELECT CAST('9:00:00' AS TIME)");
        dropTable(tableName);
    }

    @Test
    public void testPartitionByTimestamp()
    {
        testSelectOrPartitionedByTimestamp(true);
    }

    @Test
    public void testSelectByTimestamp()
    {
        testSelectOrPartitionedByTimestamp(false);
    }

    // Shared body for TIMESTAMP(6) tests: three distinct timestamps, equality and
    // range predicates, with optional identity partitioning on the column.
    private void testSelectOrPartitionedByTimestamp(boolean partitioned)
    {
        String tableName = format("test_%s_by_timestamp", partitioned ? "partitioned" : "selected");
        assertUpdate(format("CREATE TABLE %s (_timestamp timestamp(6)) %s", tableName, partitioned ?
"WITH (partitioning = ARRAY['_timestamp'])" : ""));
        @Language("SQL") String select1 = "SELECT TIMESTAMP '2017-05-01 10:12:34' _timestamp";
        @Language("SQL") String select2 = "SELECT TIMESTAMP '2017-10-01 10:12:34' _timestamp";
        @Language("SQL") String select3 = "SELECT TIMESTAMP '2018-05-01 10:12:34' _timestamp";
        assertUpdate(format("INSERT INTO %s %s", tableName, select1), 1);
        assertUpdate(format("INSERT INTO %s %s", tableName, select2), 1);
        assertUpdate(format("INSERT INTO %s %s", tableName, select3), 1);
        assertQuery(format("SELECT COUNT(*) from %s", tableName), "SELECT 3");

        // Equality and open-range predicates must each isolate exactly one row
        assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2017-05-01 10:12:34'", tableName), select1);
        assertQuery(format("SELECT * from %s WHERE _timestamp < TIMESTAMP '2017-06-01 10:12:34'", tableName), select1);
        assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2017-10-01 10:12:34'", tableName), select2);
        assertQuery(format("SELECT * from %s WHERE _timestamp > TIMESTAMP '2017-06-01 10:12:34' AND _timestamp < TIMESTAMP '2018-05-01 10:12:34'", tableName), select2);
        assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2018-05-01 10:12:34'", tableName), select3);
        assertQuery(format("SELECT * from %s WHERE _timestamp > TIMESTAMP '2018-01-01 10:12:34'", tableName), select3);
        dropTable(tableName);
    }

    // Creates a table identity-partitioned on every supported scalar type and
    // verifies a round-trip insert/select, including predicates on each partition column.
    @Test
    public void testCreatePartitionedTable()
    {
        assertUpdate("" +
                "CREATE TABLE test_partitioned_table (" +
                " _string VARCHAR" +
                ", _bigint BIGINT" +
                ", _integer INTEGER" +
                ", _real REAL" +
                ", _double DOUBLE" +
                ", _boolean BOOLEAN" +
                ", _decimal_short DECIMAL(3,2)" +
                ", _decimal_long DECIMAL(30,10)" +
                ", _timestamp TIMESTAMP(6)" +
                ", _date DATE" +
                ") " +
                "WITH (" +
                "partitioning = ARRAY[" +
                " '_string'," +
                " '_integer'," +
                " '_bigint'," +
                " '_boolean'," +
                " '_real'," +
                " '_double'," +
                " '_decimal_short', " +
                " '_decimal_long'," +
                " '_timestamp'," +
                " '_date']" +
                ")");

        assertQueryReturnsEmptyResult("SELECT * FROM test_partitioned_table");

        @Language("SQL") String
select = "" +
                "SELECT" +
                " 'foo' _string" +
                ", CAST(123 AS BIGINT) _bigint" +
                ", 456 _integer" +
                ", CAST('123.45' AS REAL) _real" +
                ", CAST('3.14' AS DOUBLE) _double" +
                ", true _boolean" +
                ", CAST('3.14' AS DECIMAL(3,2)) _decimal_short" +
                ", CAST('12345678901234567890.0123456789' AS DECIMAL(30,10)) _decimal_long" +
                ", CAST('2017-05-01 10:12:34' AS TIMESTAMP) _timestamp" +
                ", CAST('2017-05-01' AS DATE) _date";

        assertUpdate(format("INSERT INTO test_partitioned_table %s", select), 1);
        assertQuery("SELECT * FROM test_partitioned_table", select);
        // Predicate on every partition column at once must still match the single row
        assertQuery(
                "SELECT * FROM test_partitioned_table WHERE" +
                        " 'foo' = _string" +
                        " AND 456 = _integer" +
                        " AND CAST(123 AS BIGINT) = _bigint" +
                        " AND true = _boolean" +
                        " AND CAST('3.14' AS DECIMAL(3,2)) = _decimal_short" +
                        " AND CAST('12345678901234567890.0123456789' AS DECIMAL(30,10)) = _decimal_long" +
                        " AND CAST('2017-05-01 10:12:34' AS TIMESTAMP) = _timestamp" +
                        " AND CAST('2017-05-01' AS DATE) = _date",
                select);

        dropTable("test_partitioned_table");
    }

    // Nested ROW columns must be allowed alongside a partition column.
    @Test
    public void testCreatePartitionedTableWithNestedTypes()
    {
        assertUpdate("" +
                "CREATE TABLE test_partitioned_table_nested_type (" +
                " _string VARCHAR" +
                ", _struct ROW(_field1 INT, _field2 VARCHAR)" +
                ", _date DATE" +
                ") " +
                "WITH (" +
                " partitioning = ARRAY['_date']" +
                ")");

        dropTable("test_partitioned_table_nested_type");
    }

    // All-NULL row into a table partitioned on every column: NULL partition values
    // must round-trip correctly.
    @Test
    public void testPartitionedTableWithNullValues()
    {
        assertUpdate("CREATE TABLE test_partitioned_table_with_null_values (" +
                " _string VARCHAR" +
                ", _bigint BIGINT" +
                ", _integer INTEGER" +
                ", _real REAL" +
                ", _double DOUBLE" +
                ", _boolean BOOLEAN" +
                ", _decimal_short DECIMAL(3,2)" +
                ", _decimal_long DECIMAL(30,10)" +
                ", _timestamp TIMESTAMP(6)" +
                ", _date DATE" +
                ") " +
                "WITH (" +
                "partitioning = ARRAY[" +
                " '_string'," +
                " '_integer'," +
                " '_bigint'," +
                " '_boolean'," +
                " '_real'," +
                " '_double'," +
                " '_decimal_short', " +
                " '_decimal_long'," +
                " '_timestamp'," +
                " '_date']" +
                ")");

        assertQueryReturnsEmptyResult("SELECT * from test_partitioned_table_with_null_values");

        @Language("SQL") String select = "" +
                "SELECT" +
                " null _string" +
                ", null _bigint" +
                ", null _integer" +
                ", null _real" +
                ", null _double" +
                ", null _boolean" +
                ", null _decimal_short" +
                ", null _decimal_long" +
                ", null _timestamp" +
                ", null _date";

        assertUpdate("INSERT INTO test_partitioned_table_with_null_values " + select, 1);
        assertQuery("SELECT * from test_partitioned_table_with_null_values", select);
        dropTable("test_partitioned_table_with_null_values");
    }

    // CTAS with mixed-case partition column references and a bucket transform;
    // SHOW CREATE must canonicalize the partitioning entries to lower case.
    @Test
    public void testCreatePartitionedTableAs()
    {
        assertUpdate(
                "CREATE TABLE test_create_partitioned_table_as " +
                        "WITH (" +
                        "partitioning = ARRAY['ORDER_STATUS', 'Ship_Priority', 'Bucket(order_key,9)']" +
                        ") " +
                        "AS " +
                        "SELECT orderkey AS order_key, shippriority AS ship_priority, orderstatus AS order_status " +
                        "FROM tpch.tiny.orders",
                "SELECT count(*) from orders");

        assertEquals(
                computeScalar("SHOW CREATE TABLE test_create_partitioned_table_as"),
                format(
                        "CREATE TABLE %s.%s.%s (\n" +
                                " order_key bigint,\n" +
                                " ship_priority integer,\n" +
                                " order_status varchar\n" +
                                ")\n" +
                                "WITH (\n" +
                                " format = '%s',\n" +
                                " partitioning = ARRAY['order_status','ship_priority','bucket(order_key, 9)']\n" +
                                ")",
                        getSession().getCatalog().orElseThrow(),
                        getSession().getSchema().orElseThrow(),
                        "test_create_partitioned_table_as",
                        format));

        assertQuery("SELECT * from test_create_partitioned_table_as", "SELECT orderkey, shippriority, orderstatus FROM orders");

        dropTable("test_create_partitioned_table_as");
    }

    @Test
    public void testColumnComments()
    {
        // TODO add support for setting comments on existing column and replace the test with io.trino.testing.AbstractTestDistributedQueries#testCommentColumn
        assertUpdate("CREATE TABLE test_column_comments (_bigint BIGINT COMMENT 'test column comment')");

        assertQuery(
                "SHOW COLUMNS FROM test_column_comments",
                "VALUES ('_bigint', 'bigint', '', 'test column comment')");

        dropTable("test_column_comments");
    }

    @Test
    public void testTableComments()
    {
String createTableTemplate = "" + "CREATE TABLE iceberg.tpch.test_table_comments (\n" + " _x bigint\n" + ")\n" + "COMMENT '%s'\n" + "WITH (\n" + format(" format = '%s'\n", format) + ")"; @Language("SQL") String createTableSql = format(createTableTemplate, "test table comment", format); assertUpdate(createTableSql); MaterializedResult resultOfCreate = computeActual("SHOW CREATE TABLE test_table_comments"); assertEquals(getOnlyElement(resultOfCreate.getOnlyColumnAsSet()), createTableSql); assertUpdate("COMMENT ON TABLE test_table_comments IS 'different test table comment'"); MaterializedResult resultOfCommentChange = computeActual("SHOW CREATE TABLE test_table_comments"); String afterChangeSql = format(createTableTemplate, "different test table comment", format); assertEquals(getOnlyElement(resultOfCommentChange.getOnlyColumnAsSet()), afterChangeSql); dropTable("iceberg.tpch.test_table_comments"); String createTableWithoutComment = "" + "CREATE TABLE iceberg.tpch.test_table_comments (\n" + " _x bigint\n" + ")\n" + "WITH (\n" + " format = 'ORC'\n" + ")"; assertUpdate(format(createTableWithoutComment, format)); assertUpdate("COMMENT ON TABLE test_table_comments IS NULL"); MaterializedResult resultOfRemovingComment = computeActual("SHOW CREATE TABLE test_table_comments"); assertEquals(getOnlyElement(resultOfRemovingComment.getOnlyColumnAsSet()), format(createTableWithoutComment, format)); dropTable("iceberg.tpch.test_table_comments"); } @Test public void testRollbackSnapshot() { assertUpdate("CREATE TABLE test_rollback (col0 INTEGER, col1 BIGINT)"); long afterCreateTableId = getLatestSnapshotId("test_rollback"); assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (123, CAST(987 AS BIGINT))", 1); long afterFirstInsertId = getLatestSnapshotId("test_rollback"); assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (456, CAST(654 AS BIGINT))", 1); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (123, CAST(987 AS BIGINT)), (456, CAST(654 AS 
BIGINT))"); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterFirstInsertId)); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (123, CAST(987 AS BIGINT))"); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterCreateTableId)); assertEquals((long) computeActual("SELECT COUNT(*) FROM test_rollback").getOnlyValue(), 0); dropTable("test_rollback"); } private long getLatestSnapshotId(String tableName) { return (long) computeActual(format("SELECT snapshot_id FROM \"%s$snapshots\" ORDER BY committed_at DESC LIMIT 1", tableName)) .getOnlyValue(); } @Test public void testInsertIntoNotNullColumn() { assertUpdate("CREATE TABLE test_not_null_table (c1 INTEGER, c2 INTEGER NOT NULL)"); assertUpdate("INSERT INTO test_not_null_table (c2) VALUES (2)", 1); assertQuery("SELECT * FROM test_not_null_table", "VALUES (NULL, 2)"); assertQueryFails("INSERT INTO test_not_null_table (c1) VALUES (1)", "NULL value not allowed for NOT NULL column: c2"); dropTable("test_not_null_table"); assertUpdate("CREATE TABLE test_commuted_not_null_table (a BIGINT, b BIGINT NOT NULL)"); assertUpdate("INSERT INTO test_commuted_not_null_table (b) VALUES (2)", 1); assertQuery("SELECT * FROM test_commuted_not_null_table", "VALUES (NULL, 2)"); assertQueryFails("INSERT INTO test_commuted_not_null_table (b, a) VALUES (NULL, 3)", "NULL value not allowed for NOT NULL column: b"); dropTable("test_commuted_not_null_table"); } @Test public void testSchemaEvolution() { assertUpdate("CREATE TABLE test_schema_evolution_drop_end (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); assertUpdate("INSERT INTO test_schema_evolution_drop_end VALUES (0, 1, 2)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_end DROP COLUMN col2"); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1)"); assertUpdate("ALTER TABLE 
test_schema_evolution_drop_end ADD COLUMN col2 INTEGER"); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, NULL)"); assertUpdate("INSERT INTO test_schema_evolution_drop_end VALUES (3, 4, 5)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, NULL), (3, 4, 5)"); dropTable("test_schema_evolution_drop_end"); assertUpdate("CREATE TABLE test_schema_evolution_drop_middle (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); assertUpdate("INSERT INTO test_schema_evolution_drop_middle VALUES (0, 1, 2)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 1, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_middle DROP COLUMN col1"); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_middle ADD COLUMN col1 INTEGER"); assertUpdate("INSERT INTO test_schema_evolution_drop_middle VALUES (3, 4, 5)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 2, NULL), (3, 4, 5)"); dropTable("test_schema_evolution_drop_middle"); } @Test public void testLargeInFailureOnPartitionedColumns() { assertUpdate("CREATE TABLE test_large_in_failure (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col2'])"); assertUpdate("INSERT INTO test_large_in_failure VALUES (1, 10)", 1L); assertUpdate("INSERT INTO test_large_in_failure VALUES (2, 20)", 1L); List<String> predicates = IntStream.range(0, 5000).boxed() .map(Object::toString) .collect(toImmutableList()); String filter = format("col2 IN (%s)", String.join(",", predicates)); assertThatThrownBy(() -> getQueryRunner().execute(format("SELECT * FROM test_large_in_failure WHERE %s", filter))) .isInstanceOf(RuntimeException.class) .hasMessage("java.lang.StackOverflowError"); dropTable("test_large_in_failure"); } @Test public void testCreateTableLike() { FileFormat otherFormat = format == PARQUET ? 
ORC : PARQUET;
        testCreateTableLikeForFormat(otherFormat);
    }

    // Exercises every LIKE variant: extra columns, default (EXCLUDING) property copy,
    // explicit EXCLUDING/INCLUDING PROPERTIES, and INCLUDING with a format override.
    private void testCreateTableLikeForFormat(FileFormat otherFormat)
    {
        assertUpdate(format("CREATE TABLE test_create_table_like_original (col1 INTEGER, aDate DATE) WITH(format = '%s', partitioning = ARRAY['aDate'])", format));
        assertEquals(getTablePropertiesString("test_create_table_like_original"), "WITH (\n" +
                format(" format = '%s',\n", format) +
                " partitioning = ARRAY['adate']\n" +
                ")");

        assertUpdate("CREATE TABLE test_create_table_like_copy0 (LIKE test_create_table_like_original, col2 INTEGER)");
        assertUpdate("INSERT INTO test_create_table_like_copy0 (col1, aDate, col2) VALUES (1, CAST('1950-06-28' AS DATE), 3)", 1);
        assertQuery("SELECT * from test_create_table_like_copy0", "VALUES(1, CAST('1950-06-28' AS DATE), 3)");
        dropTable("test_create_table_like_copy0");

        // Plain LIKE does not copy table properties (partitioning is dropped)
        assertUpdate("CREATE TABLE test_create_table_like_copy1 (LIKE test_create_table_like_original)");
        assertEquals(getTablePropertiesString("test_create_table_like_copy1"), "WITH (\n" +
                format(" format = '%s'\n)", format));
        dropTable("test_create_table_like_copy1");

        assertUpdate("CREATE TABLE test_create_table_like_copy2 (LIKE test_create_table_like_original EXCLUDING PROPERTIES)");
        assertEquals(getTablePropertiesString("test_create_table_like_copy2"), "WITH (\n" +
                format(" format = '%s'\n)", format));
        dropTable("test_create_table_like_copy2");

        // INCLUDING PROPERTIES carries the partitioning over
        assertUpdate("CREATE TABLE test_create_table_like_copy3 (LIKE test_create_table_like_original INCLUDING PROPERTIES)");
        assertEquals(getTablePropertiesString("test_create_table_like_copy3"), "WITH (\n" +
                format(" format = '%s',\n", format) +
                " partitioning = ARRAY['adate']\n" +
                ")");
        dropTable("test_create_table_like_copy3");

        // An explicit WITH clause overrides the inherited format
        assertUpdate(format("CREATE TABLE test_create_table_like_copy4 (LIKE test_create_table_like_original INCLUDING PROPERTIES) WITH (format = '%s')", otherFormat));
        assertEquals(getTablePropertiesString("test_create_table_like_copy4"), "WITH (\n" +
                format(" format = '%s',\n", otherFormat) +
                " partitioning = ARRAY['adate']\n" +
                ")");
        dropTable("test_create_table_like_copy4");

        dropTable("test_create_table_like_original");
    }

    // Returns the WITH (...) clause from SHOW CREATE TABLE, or null when absent.
    private String getTablePropertiesString(String tableName)
    {
        MaterializedResult showCreateTable = computeActual("SHOW CREATE TABLE " + tableName);
        String createTable = (String) getOnlyElement(showCreateTable.getOnlyColumnAsSet());
        Matcher matcher = WITH_CLAUSE_EXTRACTER.matcher(createTable);
        return matcher.matches() ? matcher.group(1) : null;
    }

    // Equality predicate on a REAL column must match the inserted value exactly.
    @Test
    public void testPredicating()
    {
        assertUpdate("CREATE TABLE test_predicating_on_real (col REAL)");
        assertUpdate("INSERT INTO test_predicating_on_real VALUES 1.2", 1);
        assertQuery("SELECT * FROM test_predicating_on_real WHERE col = 1.2", "VALUES 1.2");
        dropTable("test_predicating_on_real");
    }

    // Partitioning by hour(d): verifies the "$partitions" metadata (hour buckets are
    // hours since epoch; negative for pre-1970 values) and column stats per format.
    @Test
    public void testHourTransform()
    {
        assertUpdate("CREATE TABLE test_hour_transform (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['hour(d)'])");

        @Language("SQL") String values = "VALUES " +
                "(TIMESTAMP '1969-12-31 22:22:22.222222', 8)," +
                "(TIMESTAMP '1969-12-31 23:33:11.456789', 9)," +
                "(TIMESTAMP '1969-12-31 23:44:55.567890', 10)," +
                "(TIMESTAMP '1970-01-01 00:55:44.765432', 11)," +
                "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," +
                "(TIMESTAMP '2015-01-01 10:10:02.987654', 2)," +
                "(TIMESTAMP '2015-01-01 10:55:00.456789', 3)," +
                "(TIMESTAMP '2015-05-15 12:05:01.234567', 4)," +
                "(TIMESTAMP '2015-05-15 12:21:02.345678', 5)," +
                "(TIMESTAMP '2020-02-21 13:11:11.876543', 6)," +
                "(TIMESTAMP '2020-02-21 13:12:12.654321', 7)";
        assertUpdate("INSERT INTO test_hour_transform " + values, 11);
        assertQuery("SELECT * FROM test_hour_transform", values);

        @Language("SQL") String expected = "VALUES " +
                "(-2, 1, TIMESTAMP '1969-12-31 22:22:22.222222', TIMESTAMP '1969-12-31 22:22:22.222222', 8, 8), " +
                "(-1, 2, TIMESTAMP '1969-12-31 23:33:11.456789', TIMESTAMP '1969-12-31 23:44:55.567890', 9, 10), " +
                "(0, 1, TIMESTAMP '1970-01-01 00:55:44.765432', TIMESTAMP '1970-01-01 00:55:44.765432', 11, 11), " +
                "(394474, 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 10:55:00.456789', 1, 3), " +
                "(397692, 2, TIMESTAMP '2015-05-15 12:05:01.234567', TIMESTAMP '2015-05-15 12:21:02.345678', 4, 5), " +
                "(439525, 2, TIMESTAMP '2020-02-21 13:11:11.876543', TIMESTAMP '2020-02-21 13:12:12.654321', 6, 7)";
        if (format == ORC) {
            // ORC files do not carry timestamp min/max stats, so d.min/d.max are NULL
            expected = "VALUES " +
                    "(-2, 1, NULL, NULL, 8, 8), " +
                    "(-1, 2, NULL, NULL, 9, 10), " +
                    "(0, 1, NULL, NULL, 11, 11), " +
                    "(394474, 3, NULL, NULL, 1, 3), " +
                    "(397692, 2, NULL, NULL, 4, 5), " +
                    "(439525, 2, NULL, NULL, 6, 7)";
        }
        assertQuery("SELECT d_hour, row_count, d.min, d.max, b.min, b.max FROM \"test_hour_transform$partitions\"", expected);

        // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates
        assertQuery(
                "SELECT * FROM test_hour_transform WHERE day_of_week(d) = 3 AND b % 7 = 3",
                "VALUES (TIMESTAMP '1969-12-31 23:44:55.567890', 10)");

        assertThat(query("SHOW STATS FOR test_hour_transform"))
                .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC
                .skippingTypesCheck()
                .matches("VALUES " +
                        " ('d', NULL, 0e0, NULL, " + (format == ORC ?
"NULL, NULL" : "'1969-12-31 22:22:22.222222', '2020-02-21 13:12:12.654321'") + "), " +
                        " ('b', NULL, 0e0, NULL, '1', '11'), " +
                        " (NULL, NULL, NULL, 11e0, NULL, NULL)");

        dropTable("test_hour_transform");
    }

    // Partitioning by day(d) on a DATE column: the "$partitions" metadata reports
    // the partition day plus per-partition min/max for both columns.
    @Test
    public void testDayTransformDate()
    {
        assertUpdate("CREATE TABLE test_day_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['day(d)'])");

        @Language("SQL") String values = "VALUES " +
                "(DATE '1969-01-01', 10), " +
                "(DATE '1969-12-31', 11), " +
                "(DATE '1970-01-01', 1), " +
                "(DATE '1970-03-04', 2), " +
                "(DATE '2015-01-01', 3), " +
                "(DATE '2015-01-13', 4), " +
                "(DATE '2015-01-13', 5), " +
                "(DATE '2015-05-15', 6), " +
                "(DATE '2015-05-15', 7), " +
                "(DATE '2020-02-21', 8), " +
                "(DATE '2020-02-21', 9)";
        assertUpdate("INSERT INTO test_day_transform_date " + values, 11);
        assertQuery("SELECT * FROM test_day_transform_date", values);

        assertQuery(
                "SELECT d_day, row_count, d.min, d.max, b.min, b.max FROM \"test_day_transform_date$partitions\"",
                "VALUES " +
                        "(DATE '1969-01-01', 1, DATE '1969-01-01', DATE '1969-01-01', 10, 10), " +
                        "(DATE '1969-12-31', 1, DATE '1969-12-31', DATE '1969-12-31', 11, 11), " +
                        "(DATE '1970-01-01', 1, DATE '1970-01-01', DATE '1970-01-01', 1, 1), " +
                        "(DATE '1970-03-04', 1, DATE '1970-03-04', DATE '1970-03-04', 2, 2), " +
                        "(DATE '2015-01-01', 1, DATE '2015-01-01', DATE '2015-01-01', 3, 3), " +
                        "(DATE '2015-01-13', 2, DATE '2015-01-13', DATE '2015-01-13', 4, 5), " +
                        "(DATE '2015-05-15', 2, DATE '2015-05-15', DATE '2015-05-15', 6, 7), " +
                        "(DATE '2020-02-21', 2, DATE '2020-02-21', DATE '2020-02-21', 8, 9)");

        // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates
        assertQuery(
                "SELECT * FROM test_day_transform_date WHERE day_of_week(d) = 3 AND b % 7 = 3",
                "VALUES (DATE '1969-01-01', 10)");

        assertThat(query("SHOW STATS FOR test_day_transform_date"))
                .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC
                .skippingTypesCheck()
                .matches("VALUES " +
" ('d', NULL, 0e0, NULL, '1969-01-01', '2020-02-21'), " +
                        " ('b', NULL, 0e0, NULL, '1', '11'), " +
                        " (NULL, NULL, NULL, 11e0, NULL, NULL)");

        dropTable("test_day_transform_date");
    }

    // Partitioning by day(d) on a TIMESTAMP(6) column: partition key becomes a DATE;
    // per-partition timestamp min/max availability depends on the file format.
    @Test
    public void testDayTransformTimestamp()
    {
        assertUpdate("CREATE TABLE test_day_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['day(d)'])");

        @Language("SQL") String values = "VALUES " +
                "(TIMESTAMP '1969-12-25 15:13:12.876543', 8)," +
                "(TIMESTAMP '1969-12-30 18:47:33.345678', 9)," +
                "(TIMESTAMP '1969-12-31 00:00:00.000000', 10)," +
                "(TIMESTAMP '1969-12-31 05:06:07.234567', 11)," +
                "(TIMESTAMP '1970-01-01 12:03:08.456789', 12)," +
                "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," +
                "(TIMESTAMP '2015-01-01 11:10:02.987654', 2)," +
                "(TIMESTAMP '2015-01-01 12:55:00.456789', 3)," +
                "(TIMESTAMP '2015-05-15 13:05:01.234567', 4)," +
                "(TIMESTAMP '2015-05-15 14:21:02.345678', 5)," +
                "(TIMESTAMP '2020-02-21 15:11:11.876543', 6)," +
                "(TIMESTAMP '2020-02-21 16:12:12.654321', 7)";
        assertUpdate("INSERT INTO test_day_transform_timestamp " + values, 12);
        assertQuery("SELECT * FROM test_day_transform_timestamp", values);

        @Language("SQL") String expected = "VALUES " +
                "(DATE '1969-12-25', 1, TIMESTAMP '1969-12-25 15:13:12.876543', TIMESTAMP '1969-12-25 15:13:12.876543', 8, 8), " +
                "(DATE '1969-12-30', 1, TIMESTAMP '1969-12-30 18:47:33.345678', TIMESTAMP '1969-12-30 18:47:33.345678', 9, 9), " +
                "(DATE '1969-12-31', 2, TIMESTAMP '1969-12-31 00:00:00.000000', TIMESTAMP '1969-12-31 05:06:07.234567', 10, 11), " +
                "(DATE '1970-01-01', 1, TIMESTAMP '1970-01-01 12:03:08.456789', TIMESTAMP '1970-01-01 12:03:08.456789', 12, 12), " +
                "(DATE '2015-01-01', 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 12:55:00.456789', 1, 3), " +
                "(DATE '2015-05-15', 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-05-15 14:21:02.345678', 4, 5), " +
                "(DATE '2020-02-21', 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-02-21 16:12:12.654321', 6, 7)";
        if (format == ORC) {
// Parquet has min/max for timestamps but ORC does not. expected = "VALUES " + "(DATE '1969-12-25', 1, NULL, NULL, 8, 8), " + "(DATE '1969-12-30', 1, NULL, NULL, 9, 9), " + "(DATE '1969-12-31', 2, NULL, NULL, 10, 11), " + "(DATE '1970-01-01', 1, NULL, NULL, 12, 12), " + "(DATE '2015-01-01', 3, NULL, NULL, 1, 3), " + "(DATE '2015-05-15', 2, NULL, NULL, 4, 5), " + "(DATE '2020-02-21', 2, NULL, NULL, 6, 7)"; } assertQuery("SELECT d_day, row_count, d.min, d.max, b.min, b.max FROM \"test_day_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_day_transform_timestamp WHERE day_of_week(d) = 3 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-31 00:00:00.000000', 10)"); assertThat(query("SHOW STATS FOR test_day_transform_timestamp")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, " + (format == ORC ? 
"NULL, NULL" : "'1969-12-25 15:13:12.876543', '2020-02-21 16:12:12.654321'") + "), " + " ('b', NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_day_transform_timestamp"); } @Test public void testMonthTransformDate() { assertUpdate("CREATE TABLE test_month_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['month(d)'])"); @Language("SQL") String values = "VALUES " + "(DATE '1969-11-13', 1)," + "(DATE '1969-12-01', 2)," + "(DATE '1969-12-02', 3)," + "(DATE '1969-12-31', 4)," + "(DATE '1970-01-01', 5), " + "(DATE '1970-05-13', 6), " + "(DATE '1970-12-31', 7), " + "(DATE '2020-01-01', 8), " + "(DATE '2020-06-16', 9), " + "(DATE '2020-06-28', 10), " + "(DATE '2020-06-06', 11), " + "(DATE '2020-07-18', 12), " + "(DATE '2020-07-28', 13), " + "(DATE '2020-12-31', 14)"; assertUpdate("INSERT INTO test_month_transform_date " + values, 14); assertQuery("SELECT * FROM test_month_transform_date", values); assertQuery( "SELECT d_month, row_count, d.min, d.max, b.min, b.max FROM \"test_month_transform_date$partitions\"", "VALUES " + "(-2, 1, DATE '1969-11-13', DATE '1969-11-13', 1, 1), " + "(-1, 3, DATE '1969-12-01', DATE '1969-12-31', 2, 4), " + "(0, 1, DATE '1970-01-01', DATE '1970-01-01', 5, 5), " + "(4, 1, DATE '1970-05-13', DATE '1970-05-13', 6, 6), " + "(11, 1, DATE '1970-12-31', DATE '1970-12-31', 7, 7), " + "(600, 1, DATE '2020-01-01', DATE '2020-01-01', 8, 8), " + "(605, 3, DATE '2020-06-06', DATE '2020-06-28', 9, 11), " + "(606, 2, DATE '2020-07-18', DATE '2020-07-28', 12, 13), " + "(611, 1, DATE '2020-12-31', DATE '2020-12-31', 14, 14)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_month_transform_date WHERE day_of_week(d) = 7 AND b % 7 = 3", "VALUES (DATE '2020-06-28', 10)"); assertThat(query("SHOW STATS FOR test_month_transform_date")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but 
not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, '1969-11-13', '2020-12-31'), " + " ('b', NULL, 0e0, NULL, '1', '14'), " + " (NULL, NULL, NULL, 14e0, NULL, NULL)"); dropTable("test_month_transform_date"); } @Test public void testMonthTransformTimestamp() { assertUpdate("CREATE TABLE test_month_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['month(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1969-11-15 15:13:12.876543', 8)," + "(TIMESTAMP '1969-11-19 18:47:33.345678', 9)," + "(TIMESTAMP '1969-12-01 00:00:00.000000', 10)," + "(TIMESTAMP '1969-12-01 05:06:07.234567', 11)," + "(TIMESTAMP '1970-01-01 12:03:08.456789', 12)," + "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," + "(TIMESTAMP '2015-01-01 11:10:02.987654', 2)," + "(TIMESTAMP '2015-01-01 12:55:00.456789', 3)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 4)," + "(TIMESTAMP '2015-05-15 14:21:02.345678', 5)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 6)," + "(TIMESTAMP '2020-02-21 16:12:12.654321', 7)"; assertUpdate("INSERT INTO test_month_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_month_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(-2, 2, TIMESTAMP '1969-11-15 15:13:12.876543', TIMESTAMP '1969-11-19 18:47:33.345678', 8, 9), " + "(-1, 2, TIMESTAMP '1969-12-01 00:00:00.000000', TIMESTAMP '1969-12-01 05:06:07.234567', 10, 11), " + "(0, 1, TIMESTAMP '1970-01-01 12:03:08.456789', TIMESTAMP '1970-01-01 12:03:08.456789', 12, 12), " + "(540, 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 12:55:00.456789', 1, 3), " + "(544, 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-05-15 14:21:02.345678', 4, 5), " + "(601, 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-02-21 16:12:12.654321', 6, 7)"; if (format == ORC) { expected = "VALUES " + "(-2, 2, NULL, NULL, 8, 9), " + "(-1, 2, NULL, NULL, 10, 11), " + "(0, 1, NULL, NULL, 12, 12), " + "(540, 3, 
NULL, NULL, 1, 3), " + "(544, 2, NULL, NULL, 4, 5), " + "(601, 2, NULL, NULL, 6, 7)"; } assertQuery("SELECT d_month, row_count, d.min, d.max, b.min, b.max FROM \"test_month_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_month_transform_timestamp WHERE day_of_week(d) = 1 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-01 00:00:00.000000', 10)"); assertThat(query("SHOW STATS FOR test_month_transform_timestamp")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, " + (format == ORC ? "NULL, NULL" : "'1969-11-15 15:13:12.876543', '2020-02-21 16:12:12.654321'") + "), " + " ('b', NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_month_transform_timestamp"); } @Test public void testYearTransformDate() { assertUpdate("CREATE TABLE test_year_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['year(d)'])"); @Language("SQL") String values = "VALUES " + "(DATE '1968-10-13', 1), " + "(DATE '1969-01-01', 2), " + "(DATE '1969-03-15', 3), " + "(DATE '1970-01-01', 4), " + "(DATE '1970-03-05', 5), " + "(DATE '2015-01-01', 6), " + "(DATE '2015-06-16', 7), " + "(DATE '2015-07-28', 8), " + "(DATE '2016-05-15', 9), " + "(DATE '2016-06-06', 10), " + "(DATE '2020-02-21', 11), " + "(DATE '2020-11-10', 12)"; assertUpdate("INSERT INTO test_year_transform_date " + values, 12); assertQuery("SELECT * FROM test_year_transform_date", values); assertQuery( "SELECT d_year, row_count, d.min, d.max, b.min, b.max FROM \"test_year_transform_date$partitions\"", "VALUES " + "(-2, 1, DATE '1968-10-13', DATE '1968-10-13', 1, 1), " + "(-1, 2, DATE '1969-01-01', DATE '1969-03-15', 2, 3), " + "(0, 2, DATE '1970-01-01', DATE '1970-03-05', 4, 5), " + "(45, 3, DATE '2015-01-01', DATE '2015-07-28', 6, 8), " + "(46, 
2, DATE '2016-05-15', DATE '2016-06-06', 9, 10), " + "(50, 2, DATE '2020-02-21', DATE '2020-11-10', 11, 12)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_year_transform_date WHERE day_of_week(d) = 1 AND b % 7 = 3", "VALUES (DATE '2016-06-06', 10)"); assertThat(query("SHOW STATS FOR test_year_transform_date")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, '1968-10-13', '2020-11-10'), " + " ('b', NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_year_transform_date"); } @Test public void testYearTransformTimestamp() { assertUpdate("CREATE TABLE test_year_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['year(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1968-03-15 15:13:12.876543', 1)," + "(TIMESTAMP '1968-11-19 18:47:33.345678', 2)," + "(TIMESTAMP '1969-01-01 00:00:00.000000', 3)," + "(TIMESTAMP '1969-01-01 05:06:07.234567', 4)," + "(TIMESTAMP '1970-01-18 12:03:08.456789', 5)," + "(TIMESTAMP '1970-03-14 10:01:23.123456', 6)," + "(TIMESTAMP '1970-08-19 11:10:02.987654', 7)," + "(TIMESTAMP '1970-12-31 12:55:00.456789', 8)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 9)," + "(TIMESTAMP '2015-09-15 14:21:02.345678', 10)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 11)," + "(TIMESTAMP '2020-08-21 16:12:12.654321', 12)"; assertUpdate("INSERT INTO test_year_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_year_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(-2, 2, TIMESTAMP '1968-03-15 15:13:12.876543', TIMESTAMP '1968-11-19 18:47:33.345678', 1, 2), " + "(-1, 2, TIMESTAMP '1969-01-01 00:00:00.000000', TIMESTAMP '1969-01-01 05:06:07.234567', 3, 4), " + "(0, 4, TIMESTAMP '1970-01-18 12:03:08.456789', TIMESTAMP '1970-12-31 
12:55:00.456789', 5, 8), " + "(45, 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-09-15 14:21:02.345678', 9, 10), " + "(50, 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-08-21 16:12:12.654321', 11, 12)"; if (format == ORC) { expected = "VALUES " + "(-2, 2, NULL, NULL, 1, 2), " + "(-1, 2, NULL, NULL, 3, 4), " + "(0, 4, NULL, NULL, 5, 8), " + "(45, 2, NULL, NULL, 9, 10), " + "(50, 2, NULL, NULL, 11, 12)"; } assertQuery("SELECT d_year, row_count, d.min, d.max, b.min, b.max FROM \"test_year_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_year_transform_timestamp WHERE day_of_week(d) = 2 AND b % 7 = 3", "VALUES (TIMESTAMP '2015-09-15 14:21:02.345678', 10)"); assertThat(query("SHOW STATS FOR test_year_transform_timestamp")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, " + (format == ORC ? 
"NULL, NULL" : "'1968-03-15 15:13:12.876543', '2020-08-21 16:12:12.654321'") + "), " + " ('b', NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_year_transform_timestamp"); } @Test public void testTruncateTextTransform() { assertUpdate("CREATE TABLE test_truncate_text_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['truncate(d, 2)'])"); String select = "SELECT d_trunc, row_count, d.min AS d_min, d.max AS d_max, b.min AS b_min, b.max AS b_max FROM \"test_truncate_text_transform$partitions\""; assertUpdate("INSERT INTO test_truncate_text_transform VALUES" + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + "('moscow', 5)," + "('Greece', 6)," + "('Grozny', 7)", 7); assertQuery("SELECT d_trunc FROM \"test_truncate_text_transform$partitions\"", "VALUES 'ab', 'mo', 'Gr'"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'ab'", "VALUES 1, 2, 3"); assertQuery(select + " WHERE d_trunc = 'ab'", "VALUES ('ab', 3, 'ab598', 'abxy', 1, 3)"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'mo'", "VALUES 4, 5"); assertQuery(select + " WHERE d_trunc = 'mo'", "VALUES ('mo', 2, 'mommy', 'moscow', 4, 5)"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'Gr'", "VALUES 6, 7"); assertQuery(select + " WHERE d_trunc = 'Gr'", "VALUES ('Gr', 2, 'Greece', 'Grozny', 6, 7)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_truncate_text_transform WHERE length(d) = 4 AND b % 7 = 2", "VALUES ('abxy', 2)"); assertThat(query("SHOW STATS FOR test_truncate_text_transform")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, NULL, NULL), " + " ('b', NULL, 0e0, NULL, '1', '7'), " + " (NULL, NULL, NULL, 7e0, NULL, NULL)"); 
        dropTable("test_truncate_text_transform");
    }

    // Verifies the Iceberg truncate(d, 10) partition transform for the
    // integer-like types supplied by truncateNumberTypesProvider. Truncation
    // rounds toward negative infinity, so e.g. -11 falls into the -20 bucket.
    @Test(dataProvider = "truncateNumberTypesProvider")
    public void testTruncateIntegerTransform(String dataType)
    {
        String table = format("test_truncate_%s_transform", dataType);
        assertUpdate(format("CREATE TABLE " + table + " (d %s, b BIGINT) WITH (partitioning = ARRAY['truncate(d, 10)'])", dataType));
        String select = "SELECT d_trunc, row_count, d.min AS d_min, d.max AS d_max, b.min AS b_min, b.max AS b_max FROM \"" + table + "$partitions\"";

        assertUpdate("INSERT INTO " + table + " VALUES" +
                "(0, 1)," +
                "(1, 2)," +
                "(5, 3)," +
                "(9, 4)," +
                "(10, 5)," +
                "(11, 6)," +
                "(120, 7)," +
                "(121, 8)," +
                "(123, 9)," +
                "(-1, 10)," +
                "(-5, 11)," +
                "(-10, 12)," +
                "(-11, 13)," +
                "(-123, 14)," +
                "(-130, 15)", 15);

        assertQuery("SELECT d_trunc FROM \"" + table + "$partitions\"", "VALUES 0, 10, 120, -10, -20, -130");

        assertQuery("SELECT b FROM " + table + " WHERE d IN (0, 1, 5, 9)", "VALUES 1, 2, 3, 4");
        assertQuery(select + " WHERE d_trunc = 0", "VALUES (0, 4, 0, 9, 1, 4)");

        assertQuery("SELECT b FROM " + table + " WHERE d IN (10, 11)", "VALUES 5, 6");
        assertQuery(select + " WHERE d_trunc = 10", "VALUES (10, 2, 10, 11, 5, 6)");

        assertQuery("SELECT b FROM " + table + " WHERE d IN (120, 121, 123)", "VALUES 7, 8, 9");
        assertQuery(select + " WHERE d_trunc = 120", "VALUES (120, 3, 120, 123, 7, 9)");

        assertQuery("SELECT b FROM " + table + " WHERE d IN (-1, -5, -10)", "VALUES 10, 11, 12");
        assertQuery(select + " WHERE d_trunc = -10", "VALUES (-10, 3, -10, -1, 10, 12)");

        assertQuery("SELECT b FROM " + table + " WHERE d = -11", "VALUES 13");
        assertQuery(select + " WHERE d_trunc = -20", "VALUES (-20, 1, -11, -11, 13, 13)");

        assertQuery("SELECT b FROM " + table + " WHERE d IN (-123, -130)", "VALUES 14, 15");
        assertQuery(select + " WHERE d_trunc = -130", "VALUES (-130, 2, -130, -123, 14, 15)");

        // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates
        assertQuery(
                "SELECT * FROM " + table + " WHERE d % 10 = -1 AND b % 7 = 3",
                "VALUES (-1, 10)");

        assertThat(query("SHOW STATS FOR " + table))
                .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC
                .skippingTypesCheck()
                .matches("VALUES " +
                        " ('d', NULL, 0e0, NULL, '-130', '123'), " +
                        " ('b', NULL, 0e0, NULL, '1', '15'), " +
                        " (NULL, NULL, NULL, 15e0, NULL, NULL)");

        dropTable(table);
    }

    // Integer-like column types exercised by testTruncateIntegerTransform.
    @DataProvider
    public Object[][] truncateNumberTypesProvider()
    {
        return new Object[][] {
                {"integer"},
                {"bigint"},
        };
    }

    // Verifies the Iceberg truncate(d, 10) transform on DECIMAL(9, 2): the
    // truncation width applies to the unscaled value, so buckets are 0.10 wide.
    @Test
    public void testTruncateDecimalTransform()
    {
        assertUpdate("CREATE TABLE test_truncate_decimal_transform (d DECIMAL(9, 2), b BIGINT) WITH (partitioning = ARRAY['truncate(d, 10)'])");
        String select = "SELECT d_trunc, row_count, d.min AS d_min, d.max AS d_max, b.min AS b_min, b.max AS b_max FROM \"test_truncate_decimal_transform$partitions\"";

        assertUpdate("INSERT INTO test_truncate_decimal_transform VALUES" +
                "(12.34, 1)," +
                "(12.30, 2)," +
                "(12.29, 3)," +
                "(0.05, 4)," +
                "(-0.05, 5)", 5);

        assertQuery("SELECT d_trunc FROM \"test_truncate_decimal_transform$partitions\"", "VALUES 12.30, 12.20, 0.00, -0.10");

        assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d IN (12.34, 12.30)", "VALUES 1, 2");
        assertQuery(select + " WHERE d_trunc = 12.30", "VALUES (12.30, 2, 12.30, 12.34, 1, 2)");

        assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = 12.29", "VALUES 3");
        assertQuery(select + " WHERE d_trunc = 12.20", "VALUES (12.20, 1, 12.29, 12.29, 3, 3)");

        assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = 0.05", "VALUES 4");
        assertQuery(select + " WHERE d_trunc = 0.00", "VALUES (0.00, 1, 0.05, 0.05, 4, 4)");

        assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = -0.05", "VALUES 5");
        assertQuery(select + " WHERE d_trunc = -0.10", "VALUES (-0.10, 1, -0.05, -0.05, 5, 5)");

        // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates
        assertQuery(
                "SELECT * FROM test_truncate_decimal_transform WHERE d * 100 % 10 = 9 AND b % 7 = 3",
                "VALUES (12.29, 3)");

        assertThat(query("SHOW STATS FOR test_truncate_decimal_transform"))
                .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC
                .skippingTypesCheck()
                .matches("VALUES " +
                        " ('d', NULL, 0e0, NULL, '-0.05', '12.34'), " +
                        " ('b', NULL, 0e0, NULL, '1', '5'), " +
                        " (NULL, NULL, NULL, 5e0, NULL, NULL)");

        dropTable("test_truncate_decimal_transform");
    }

    // Verifies the Iceberg bucket(d, 2) partition transform on VARCHAR: rows
    // hash into two partitions, and non-pushdownable predicates still filter
    // correctly.
    @Test
    public void testBucketTransform()
    {
        String select = "SELECT d_bucket, row_count, d.min AS d_min, d.max AS d_max, b.min AS b_min, b.max AS b_max FROM \"test_bucket_transform$partitions\"";
        assertUpdate("CREATE TABLE test_bucket_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['bucket(d, 2)'])");
        assertUpdate(
                "INSERT INTO test_bucket_transform VALUES" +
                        "('abcd', 1)," +
                        "('abxy', 2)," +
                        "('ab598', 3)," +
                        "('mommy', 4)," +
                        "('moscow', 5)," +
                        "('Greece', 6)," +
                        "('Grozny', 7)",
                7);

        assertQuery("SELECT COUNT(*) FROM \"test_bucket_transform$partitions\"", "SELECT 2");
        assertQuery(select + " WHERE d_bucket = 0", "VALUES(0, 3, 'Grozny', 'mommy', 1, 7)");
        assertQuery(select + " WHERE d_bucket = 1", "VALUES(1, 4, 'Greece', 'moscow', 2, 6)");

        // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates
        assertQuery(
                "SELECT * FROM test_bucket_transform WHERE length(d) = 4 AND b % 7 = 2",
                "VALUES ('abxy', 2)");

        assertThat(query("SHOW STATS FOR test_bucket_transform"))
                .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC
                .skippingTypesCheck()
                .matches("VALUES " +
                        " ('d', NULL, 0e0, NULL, NULL, NULL), " +
                        " ('b', NULL, 0e0, NULL, '1', '7'), " +
                        " (NULL, NULL, NULL, 7e0, NULL, NULL)");

        dropTable("test_bucket_transform");
    }

    // Metadata-only DELETE on a single partition column: row-level deletes are
    // rejected, whole partitions can be dropped without rewriting data files.
    @Test
    public void testMetadataDeleteSimple()
    {
        assertUpdate("CREATE TABLE test_metadata_delete_simple (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col1'])");
        assertUpdate("INSERT INTO test_metadata_delete_simple VALUES(1, 100), (1, 101), (1, 102), (2, 200), (2, 201), (3, 300)", 6);
        // A predicate that only covers part of a partition must be rejected...
        assertQueryFails(
                "DELETE FROM test_metadata_delete_simple WHERE col1 = 1 AND col2 > 101",
                "This connector only supports delete where one or more partitions are deleted entirely");
        // ...and must leave the data untouched.
        assertQuery("SELECT sum(col2) FROM test_metadata_delete_simple", "SELECT 1004");
        assertQuery("SELECT count(*) FROM \"test_metadata_delete_simple$partitions\"", "SELECT 3");
        // Deleting a whole partition succeeds and removes it from $partitions.
        assertUpdate("DELETE FROM test_metadata_delete_simple WHERE col1 = 1");
        assertQuery("SELECT sum(col2) FROM test_metadata_delete_simple", "SELECT 701");
        assertQuery("SELECT count(*) FROM \"test_metadata_delete_simple$partitions\"", "SELECT 2");
        dropTable("test_metadata_delete_simple");
    }

    // Metadata-only DELETE over two partition columns, using TPC-H lineitem data;
    // deletes on a non-partition column (orderkey) must be rejected.
    @Test
    public void testMetadataDelete()
    {
        assertUpdate("CREATE TABLE test_metadata_delete (" +
                " orderkey BIGINT," +
                " linenumber INTEGER," +
                " linestatus VARCHAR" +
                ") " +
                "WITH (" +
                " partitioning = ARRAY[ 'linenumber', 'linestatus' ]" +
                ")");
        assertUpdate(
                "" +
                        "INSERT INTO test_metadata_delete " +
                        "SELECT orderkey, linenumber, linestatus " +
                        "FROM tpch.tiny.lineitem",
                "SELECT count(*) FROM lineitem");

        assertQuery("SELECT COUNT(*) FROM \"test_metadata_delete$partitions\"", "SELECT 14");

        assertUpdate("DELETE FROM test_metadata_delete WHERE linestatus = 'F' AND linenumber = 3");
        assertQuery("SELECT * FROM test_metadata_delete", "SELECT orderkey, linenumber, linestatus FROM lineitem WHERE linestatus <> 'F' or linenumber <> 3");
        assertQuery("SELECT count(*) FROM \"test_metadata_delete$partitions\"", "SELECT 13");

        assertUpdate("DELETE FROM test_metadata_delete WHERE linestatus='O'");
        assertQuery("SELECT count(*) FROM \"test_metadata_delete$partitions\"", "SELECT 6");
        assertQuery("SELECT * FROM test_metadata_delete", "SELECT orderkey, linenumber, linestatus FROM lineitem WHERE linestatus <> 'O' AND linenumber <> 3");

        // orderkey is not a partition column, so this cannot be a metadata delete.
        assertQueryFails("DELETE FROM test_metadata_delete WHERE orderkey=1", "This connector only supports delete where one or more partitions are deleted entirely");

        dropTable("test_metadata_delete");
    }

    // Runs the IN-list scenario with list sizes around typical planner thresholds.
    @Test
    public void testInSet()
    {
        testInSet(31);
        testInSet(35);
    }

    // Builds a table of inCount rows and selects with an IN list of inCount values.
    private void testInSet(int inCount)
    {
        String values = range(1, inCount + 1)
                .mapToObj(n -> format("(%s, %s)", n, n + 10))
                .collect(joining(", "));
        String inList = range(1, inCount + 1)
                .mapToObj(Integer::toString)
                .collect(joining(", "));

        assertUpdate("CREATE TABLE test_in_set (col1 INTEGER, col2 BIGINT)");
        assertUpdate(format("INSERT INTO test_in_set VALUES %s", values), inCount);
        // This proves that SELECTs with large IN phrases work correctly
        computeActual(format("SELECT col1 FROM test_in_set WHERE col1 IN (%s)", inList));
        dropTable("test_in_set");
    }

    // Verifies SHOW STATS output (null fraction, row count, low/high values)
    // for a single-column unpartitioned table, before and after an extra insert.
    @Test
    public void testBasicTableStatistics()
    {
        String tableName = "test_basic_table_statistics";
        assertUpdate(format("CREATE TABLE %s (col REAL)", tableName));
        String insertStart = format("INSERT INTO %s", tableName);
        assertUpdate(insertStart + " VALUES -10", 1);
        assertUpdate(insertStart + " VALUES 100", 1);

        // SHOW STATS returns rows of the form: column_name, data_size, distinct_values_count, nulls_fractions, row_count, low_value, high_value
        MaterializedResult result = computeActual("SHOW STATS FOR " + tableName);
        MaterializedResult expectedStatistics =
                resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR)
                        .row("col", null, null, 0.0, null, "-10.0", "100.0")
                        .row(null, null, null, null, 2.0, null, null)
                        .build();
        assertEquals(result, expectedStatistics);

        assertUpdate(insertStart + " VALUES 200", 1);

        result = computeActual("SHOW STATS FOR " + tableName);
        expectedStatistics =
                resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR)
                        .row("col", null, null, 0.0, null, "-10.0", "200.0")
                        .row(null, null, null, null, 3.0, null, null)
                        .build();
        assertEquals(result, expectedStatistics);

        dropTable(tableName);
    }

    // Verifies SHOW STATS for a multi-column table as rows (including NULLs) are
    // added incrementally; the null fraction for col1 becomes 5/13 at the end.
    @Test
    public void testMultipleColumnTableStatistics()
    {
        String tableName = "test_multiple_table_statistics";
        assertUpdate(format("CREATE TABLE %s (col1 REAL, col2 INTEGER, col3 DATE)", tableName));
        String insertStart = format("INSERT INTO %s", tableName);
        assertUpdate(insertStart + " VALUES (-10, -1, DATE '2019-06-28')", 1);
        assertUpdate(insertStart + " VALUES (100, 10, DATE '2020-01-01')", 1);

        MaterializedResult result = computeActual("SHOW STATS FOR " + tableName);
        MaterializedResult expectedStatistics =
                resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR)
                        .row("col1", null, null, 0.0, null, "-10.0", "100.0")
                        .row("col2", null, null, 0.0, null, "-1", "10")
                        .row("col3", null, null, 0.0, null, "2019-06-28", "2020-01-01")
                        .row(null, null, null, null, 2.0, null, null)
                        .build();
        assertEquals(result, expectedStatistics);

        assertUpdate(insertStart + " VALUES (200, 20, DATE '2020-06-28')", 1);
        result = computeActual("SHOW STATS FOR " + tableName);
        expectedStatistics =
                resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR)
                        .row("col1", null, null, 0.0, null, "-10.0", "200.0")
                        .row("col2", null, null, 0.0, null, "-1", "20")
                        .row("col3", null, null, 0.0, null, "2019-06-28", "2020-06-28")
                        .row(null, null, null, null, 3.0, null, null)
                        .build();
        assertEquals(result, expectedStatistics);

        assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(21, 25)
                .mapToObj(i -> format("(200, %d, DATE '2020-07-%d')", i, i))
                .collect(joining(", ")), 5);

        // Five rows with a NULL col1 to exercise the nulls-fraction statistic.
        assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(26, 30)
                .mapToObj(i -> format("(NULL, %d, DATE '2020-06-%d')", i, i))
                .collect(joining(", ")), 5);

        result = computeActual("SHOW STATS FOR " + tableName);
        expectedStatistics =
                resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR)
                        .row("col1", null, null, 5.0 / 13.0, null, "-10.0", "200.0")
                        .row("col2", null, null, 0.0, null, "-1", "30")
                        .row("col3", null, null, 0.0, null, "2019-06-28", "2020-07-25")
                        .row(null, null, null, null, 13.0, null, null)
                        .build();
        assertEquals(result, expectedStatistics);

        dropTable(tableName);
    }

    // Verifies statistics for a partitioned table as rows (including NULLs in
    // both the data and the partition column) are added incrementally.
    @Test
    public void testPartitionedTableStatistics()
    {
        assertUpdate("CREATE TABLE iceberg.tpch.test_partitioned_table_statistics (col1 REAL, col2 BIGINT) WITH (partitioning = ARRAY['col2'])");

        String insertStart = "INSERT INTO test_partitioned_table_statistics";
        assertUpdate(insertStart + " VALUES (-10, -1)", 1);
        assertUpdate(insertStart + " VALUES (100, 10)", 1);

        MaterializedResult result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics");
        assertEquals(result.getRowCount(), 3);

        MaterializedRow row0 = result.getMaterializedRows().get(0);
        assertEquals(row0.getField(0), "col1");
        assertEquals(row0.getField(3), 0.0);
        assertEquals(row0.getField(5), "-10.0");
        assertEquals(row0.getField(6), "100.0");

        MaterializedRow row1 = result.getMaterializedRows().get(1);
        assertEquals(row1.getField(0), "col2");
        assertEquals(row1.getField(3), 0.0);
        assertEquals(row1.getField(5), "-1");
        assertEquals(row1.getField(6), "10");

        MaterializedRow row2 = result.getMaterializedRows().get(2);
        assertEquals(row2.getField(4), 2.0);

        assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(1, 5)
                .mapToObj(i -> format("(%d, 10)", i + 100))
                .collect(joining(", ")), 5);

        // Five rows with NULL col1 — nulls fraction for col1 becomes 5/12.
        assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(6, 10)
                .mapToObj(i -> "(NULL, 10)")
                .collect(joining(", ")), 5);

        result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics");
        assertEquals(result.getRowCount(), 3);
        row0 = result.getMaterializedRows().get(0);
        assertEquals(row0.getField(0), "col1");
        assertEquals(row0.getField(3), 5.0 / 12.0);
        assertEquals(row0.getField(5), "-10.0");
        assertEquals(row0.getField(6), "105.0");

        row1 = result.getMaterializedRows().get(1);
        assertEquals(row1.getField(0), "col2");
        assertEquals(row1.getField(3), 0.0);
        assertEquals(row1.getField(5), "-1");
        assertEquals(row1.getField(6), "10");

        row2 = result.getMaterializedRows().get(2);
        assertEquals(row2.getField(4), 12.0);
        // Five rows with a NULL partition value — nulls fractions become 5/17.
        assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(6, 10)
                .mapToObj(i -> "(100, NULL)")
                .collect(joining(", ")), 5);

        result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics");
        row0 = result.getMaterializedRows().get(0);
        assertEquals(row0.getField(0), "col1");
        assertEquals(row0.getField(3), 5.0 / 17.0);
        assertEquals(row0.getField(5), "-10.0");
        assertEquals(row0.getField(6), "105.0");

        row1 = result.getMaterializedRows().get(1);
        assertEquals(row1.getField(0), "col2");
        assertEquals(row1.getField(3), 5.0 / 17.0);
        assertEquals(row1.getField(5), "-1");
        assertEquals(row1.getField(6), "10");

        row2 = result.getMaterializedRows().get(2);
        assertEquals(row2.getField(4), 17.0);

        dropTable("iceberg.tpch.test_partitioned_table_statistics");
    }

    // Verifies that passing a Constraint with a non-empty predicate into the
    // statistics code path fails fast, since the engine never provides one.
    @Test
    public void testStatisticsConstraints()
    {
        String tableName = "iceberg.tpch.test_simple_partitioned_table_statistics";
        assertUpdate("CREATE TABLE iceberg.tpch.test_simple_partitioned_table_statistics (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col1'])");

        String insertStart = "INSERT INTO iceberg.tpch.test_simple_partitioned_table_statistics";
        assertUpdate(insertStart + " VALUES (1, 101), (2, 102), (3, 103), (4, 104)", 4);
        TableStatistics tableStatistics = getTableStatistics(tableName, new Constraint(TupleDomain.all()));
        IcebergColumnHandle col1Handle = getColumnHandleFromStatistics(tableStatistics, "col1");

        // Constraint.predicate is currently not supported, because it's never provided by the engine.
        // TODO add (restore) test coverage when this changes.

        // predicate on a partition column
        assertThatThrownBy(() ->
                getTableStatistics(tableName, new Constraint(
                        TupleDomain.all(),
                        Optional.of(new TestRelationalNumberPredicate("col1", 3, i1 -> i1 >= 0)),
                        Optional.of(ImmutableSet.of(col1Handle)))))
                .isInstanceOf(VerifyException.class)
                .hasMessage("Unexpected Constraint predicate");

        // predicate on an unspecified set of columns column
        assertThatThrownBy(() ->
                getTableStatistics(tableName, new Constraint(
                        TupleDomain.all(),
                        Optional.of(new TestRelationalNumberPredicate("col2", 102, i -> i >= 0)),
                        Optional.empty())))
                .isInstanceOf(VerifyException.class)
                .hasMessage("Unexpected Constraint predicate");

        dropTable(tableName);
    }

    // Verifies which predicates IcebergMetadata.applyFilter enforces: partition
    // column predicates (col2/col3) are enforced, non-partition predicates (col1)
    // remain unenforced, including large IN lists above the compaction threshold.
    @Test
    public void testPredicatePushdown()
    {
        QualifiedObjectName tableName = new QualifiedObjectName("iceberg", "tpch", "test_predicate");
        assertUpdate(format("CREATE TABLE %s (col1 BIGINT, col2 BIGINT, col3 BIGINT) WITH (partitioning = ARRAY['col2', 'col3'])", tableName));
        assertUpdate(format("INSERT INTO %s VALUES (1, 10, 100)", tableName), 1L);
        assertUpdate(format("INSERT INTO %s VALUES (2, 20, 200)", tableName), 1L);

        assertQuery(format("SELECT * FROM %s WHERE col1 = 1", tableName), "VALUES (1, 10, 100)");
        assertFilterPushdown(
                tableName,
                ImmutableMap.of("col1", singleValue(BIGINT, 1L)),
                ImmutableMap.of(),
                ImmutableMap.of("col1", singleValue(BIGINT, 1L)));

        assertQuery(format("SELECT * FROM %s WHERE col2 = 10", tableName), "VALUES (1, 10, 100)");
        assertFilterPushdown(
                tableName,
                ImmutableMap.of("col2", singleValue(BIGINT, 10L)),
                ImmutableMap.of("col2", singleValue(BIGINT, 10L)),
                ImmutableMap.of());

        assertQuery(format("SELECT * FROM %s WHERE col1 = 1 AND col2 = 10", tableName), "VALUES (1, 10, 100)");
        assertFilterPushdown(
                tableName,
                ImmutableMap.of("col1", singleValue(BIGINT, 1L), "col2", singleValue(BIGINT, 10L)),
                ImmutableMap.of("col2", singleValue(BIGINT, 10L)),
                ImmutableMap.of("col1", singleValue(BIGINT, 1L)));

        // Assert pushdown for an IN predicate with value count above the default compaction threshold
        List<Long> values = LongStream.range(1L, 1010L).boxed()
                .filter(index -> index != 20L)
                .collect(toImmutableList());
        assertTrue(values.size() > ICEBERG_DOMAIN_COMPACTION_THRESHOLD);
        String valuesString = String.join(",", values.stream().map(Object::toString).collect(toImmutableList()));
        String inPredicate = "%s IN (" + valuesString + ")";
        assertQuery(
                format("SELECT * FROM %s WHERE %s AND %s", tableName, format(inPredicate, "col1"), format(inPredicate, "col2")),
                "VALUES (1, 10, 100)");

        assertFilterPushdown(
                tableName,
                ImmutableMap.of("col1", multipleValues(BIGINT, values), "col2", multipleValues(BIGINT, values)),
                ImmutableMap.of("col2", multipleValues(BIGINT, values)),
                // Unenforced predicate is simplified during split generation, but not reflected here
                ImmutableMap.of("col1", multipleValues(BIGINT, values)));

        dropTable(tableName.getObjectName());
    }

    // Applies the given filter through Metadata.applyFilter inside a transaction
    // and checks the enforced/unenforced split produced by the Iceberg connector.
    private void assertFilterPushdown(
            QualifiedObjectName tableName,
            Map<String, Domain> filter,
            Map<String, Domain> expectedEnforcedPredicate,
            Map<String, Domain> expectedUnenforcedPredicate)
    {
        Metadata metadata = getQueryRunner().getMetadata();

        newTransaction().execute(getSession(), session -> {
            TableHandle table = metadata.getTableHandle(session, tableName)
                    .orElseThrow(() -> new TableNotFoundException(tableName.asSchemaTableName()));

            Map<String, ColumnHandle> columns = metadata.getColumnHandles(session, table);
            TupleDomain<ColumnHandle> domains = TupleDomain.withColumnDomains(
                    filter.entrySet().stream()
                            .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue)));

            Optional<ConstraintApplicationResult<TableHandle>> result = metadata.applyFilter(session, table, new Constraint(domains));

            assertTrue(result.isEmpty() == (expectedUnenforcedPredicate == null && expectedEnforcedPredicate == null));

            if (result.isPresent()) {
                IcebergTableHandle newTable = (IcebergTableHandle) result.get().getHandle().getConnectorHandle();

                assertEquals(
                        newTable.getEnforcedPredicate(),
TupleDomain.withColumnDomains(expectedEnforcedPredicate.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue)))); assertEquals( newTable.getUnenforcedPredicate(), TupleDomain.withColumnDomains(expectedUnenforcedPredicate.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue)))); } }); } private TransactionBuilder newTransaction() { return transaction(getQueryRunner().getTransactionManager(), getQueryRunner().getAccessControl()); } private static class TestRelationalNumberPredicate implements Predicate<Map<ColumnHandle, NullableValue>> { private final String columnName; private final Number comparand; private final Predicate<Integer> comparePredicate; public TestRelationalNumberPredicate(String columnName, Number comparand, Predicate<Integer> comparePredicate) { this.columnName = columnName; this.comparand = comparand; this.comparePredicate = comparePredicate; } @Override public boolean test(Map<ColumnHandle, NullableValue> nullableValues) { for (Map.Entry<ColumnHandle, NullableValue> entry : nullableValues.entrySet()) { IcebergColumnHandle handle = (IcebergColumnHandle) entry.getKey(); if (columnName.equals(handle.getName())) { Object object = entry.getValue().getValue(); if (object instanceof Long) { return comparePredicate.test(((Long) object).compareTo(comparand.longValue())); } if (object instanceof Double) { return comparePredicate.test(((Double) object).compareTo(comparand.doubleValue())); } throw new IllegalArgumentException(format("NullableValue is neither Long or Double, but %s", object)); } } return false; } } private ColumnStatistics getStatisticsForColumn(TableStatistics tableStatistics, String columnName) { for (Map.Entry<ColumnHandle, ColumnStatistics> entry : tableStatistics.getColumnStatistics().entrySet()) { IcebergColumnHandle handle = (IcebergColumnHandle) entry.getKey(); if (handle.getName().equals(columnName)) { return 
checkColumnStatistics(entry.getValue()); } } throw new IllegalArgumentException("TableStatistics did not contain column named " + columnName); } private static IcebergColumnHandle getColumnHandleFromStatistics(TableStatistics tableStatistics, String columnName) { for (ColumnHandle columnHandle : tableStatistics.getColumnStatistics().keySet()) { IcebergColumnHandle handle = (IcebergColumnHandle) columnHandle; if (handle.getName().equals(columnName)) { return handle; } } throw new IllegalArgumentException("TableStatistics did not contain column named " + columnName); } private ColumnStatistics checkColumnStatistics(ColumnStatistics statistics) { assertNotNull(statistics, "statistics is null"); // Sadly, statistics.getDataSize().isUnknown() for columns in ORC files. See the TODO // in IcebergOrcFileWriter. if (format == ORC) { assertTrue(statistics.getDataSize().isUnknown()); } else { assertFalse(statistics.getDataSize().isUnknown()); } assertFalse(statistics.getNullsFraction().isUnknown(), "statistics nulls fraction is unknown"); assertFalse(statistics.getRange().isEmpty(), "statistics range is not present"); return statistics; } private TableStatistics getTableStatistics(String tableName, Constraint constraint) { Metadata metadata = getDistributedQueryRunner().getCoordinator().getMetadata(); QualifiedObjectName qualifiedName = QualifiedObjectName.valueOf(tableName); return transaction(getQueryRunner().getTransactionManager(), getQueryRunner().getAccessControl()) .execute(getSession(), session -> { Optional<TableHandle> optionalHandle = metadata.getTableHandle(session, qualifiedName); checkArgument(optionalHandle.isPresent(), "Could not create table handle for table %s", tableName); return metadata.getTableStatistics(session, optionalHandle.get(), constraint); }); } @Test public void testCreateNestedPartitionedTable() { assertUpdate("CREATE TABLE test_nested_table_1 (" + " bool BOOLEAN" + ", int INTEGER" + ", arr ARRAY(VARCHAR)" + ", big BIGINT" + ", rl REAL" + ", 
dbl DOUBLE" + ", mp MAP(INTEGER, VARCHAR)" + ", dec DECIMAL(5,2)" + ", vc VARCHAR" + ", vb VARBINARY" + ", ts TIMESTAMP(6)" + ", str ROW(id INTEGER , vc VARCHAR)" + ", dt DATE)" + " WITH (partitioning = ARRAY['int'])"); assertUpdate( "INSERT INTO test_nested_table_1 " + " select true, 1, array['uno', 'dos', 'tres'], BIGINT '1', REAL '1.0', DOUBLE '1.0', map(array[1,2,3,4], array['ek','don','teen','char'])," + " CAST(1.0 as DECIMAL(5,2))," + " 'one', VARBINARY 'binary0/1values',\n" + " TIMESTAMP '2021-07-24 02:43:57.348000'," + " (CAST(ROW(null, 'this is a random value') AS ROW(int, varchar))), " + " DATE '2021-07-24'", 1); assertEquals(computeActual("SELECT * from test_nested_table_1").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_1")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('bool', NULL, 0e0, NULL, NULL, NULL), " + " ('int', NULL, 0e0, NULL, '1', '1'), " + " ('arr', NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('big', NULL, 0e0, NULL, '1', '1'), " + " ('rl', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('dbl', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('mp', NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dec', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('vc', NULL, 0e0, NULL, NULL, NULL), " + " ('vb', NULL, 0e0, NULL, NULL, NULL), " + " ('ts', NULL, 0e0, NULL, " + (format == ORC ? "NULL, NULL" : "'2021-07-24 02:43:57.348000', '2021-07-24 02:43:57.348000'") + "), " + " ('str', NULL, " + (format == ORC ? 
"0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dt', NULL, 0e0, NULL, '2021-07-24', '2021-07-24'), " + " (NULL, NULL, NULL, 1e0, NULL, NULL)"); dropTable("test_nested_table_1"); assertUpdate("" + "CREATE TABLE test_nested_table_2 (" + " int INTEGER" + ", arr ARRAY(ROW(id INTEGER, vc VARCHAR))" + ", big BIGINT" + ", rl REAL" + ", dbl DOUBLE" + ", mp MAP(INTEGER, ARRAY(VARCHAR))" + ", dec DECIMAL(5,2)" + ", str ROW(id INTEGER, vc VARCHAR, arr ARRAY(INTEGER))" + ", vc VARCHAR)" + " WITH (partitioning = ARRAY['int'])"); assertUpdate( "INSERT INTO test_nested_table_2 " + " select 1, array[cast(row(1, null) as row(int, varchar)), cast(row(2, 'dos') as row(int, varchar))], BIGINT '1', REAL '1.0', DOUBLE '1.0', " + "map(array[1,2], array[array['ek', 'one'], array['don', 'do', 'two']]), CAST(1.0 as DECIMAL(5,2)), " + "CAST(ROW(1, 'this is a random value', null) AS ROW(int, varchar, array(int))), 'one'", 1); assertEquals(computeActual("SELECT * from test_nested_table_2").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_2")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('int', NULL, 0e0, NULL, '1', '1'), " + " ('arr', NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('big', NULL, 0e0, NULL, '1', '1'), " + " ('rl', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('dbl', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('mp', NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dec', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('vc', NULL, 0e0, NULL, NULL, NULL), " + " ('str', NULL, " + (format == ORC ? 
"0e0" : "NULL") + ", NULL, NULL, NULL), " + " (NULL, NULL, NULL, 1e0, NULL, NULL)"); assertUpdate("CREATE TABLE test_nested_table_3 WITH (partitioning = ARRAY['int']) AS SELECT * FROM test_nested_table_2", 1); assertEquals(computeActual("SELECT * FROM test_nested_table_3").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_3")) .matches("SHOW STATS FOR test_nested_table_2"); dropTable("test_nested_table_2"); dropTable("test_nested_table_3"); } @Test public void testSerializableReadIsolation() { assertUpdate("CREATE TABLE test_read_isolation (x int)"); assertUpdate("INSERT INTO test_read_isolation VALUES 123, 456", 2); withTransaction(session -> { assertQuery(session, "SELECT * FROM test_read_isolation", "VALUES 123, 456"); assertUpdate("INSERT INTO test_read_isolation VALUES 789", 1); assertQuery("SELECT * FROM test_read_isolation", "VALUES 123, 456, 789"); assertQuery(session, "SELECT * FROM test_read_isolation", "VALUES 123, 456"); }); assertQuery("SELECT * FROM test_read_isolation", "VALUES 123, 456, 789"); dropTable("test_read_isolation"); } private void withTransaction(Consumer<Session> consumer) { transaction(getQueryRunner().getTransactionManager(), getQueryRunner().getAccessControl()) .readCommitted() .execute(getSession(), consumer); } private void dropTable(String table) { Session session = getSession(); assertUpdate(session, "DROP TABLE " + table); assertFalse(getQueryRunner().tableExists(session, table)); } @Test public void testOptimizedMetadataQueries() { Session session = Session.builder(getSession()) .setSystemProperty("optimize_metadata_queries", "true") .build(); assertUpdate("CREATE TABLE test_metadata_optimization (a BIGINT, b BIGINT, c BIGINT) WITH (PARTITIONING = ARRAY['b', 'c'])"); assertUpdate("INSERT INTO test_metadata_optimization VALUES (5, 6, 7), (8, 9, 10)", 2); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization", "VALUES (6), (9)"); assertQuery(session, "SELECT DISTINCT b, c FROM 
test_metadata_optimization", "VALUES (6, 7), (9, 10)"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization WHERE b < 7", "VALUES (6)"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization WHERE c > 8", "VALUES (9)"); // Assert behavior after metadata delete assertUpdate("DELETE FROM test_metadata_optimization WHERE b = 6"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization", "VALUES (9)"); // TODO: assert behavior after deleting the last row of a partition, once row-level deletes are supported. // i.e. a query like 'DELETE FROM test_metadata_optimization WHERE b = 6 AND a = 5' dropTable("test_metadata_optimization"); } @Test public void testIncorrectIcebergFileSizes() throws Exception { // Create a table with a single insert assertUpdate("CREATE TABLE test_iceberg_file_size (x BIGINT) WITH (format='PARQUET')"); assertUpdate("INSERT INTO test_iceberg_file_size VALUES (123), (456), (758)", 3); // Get manifest file MaterializedResult result = computeActual("SELECT path FROM \"test_iceberg_file_size$manifests\""); assertEquals(result.getRowCount(), 1); String manifestFile = (String) result.getOnlyValue(); // Read manifest file Schema schema; GenericData.Record entry = null; try (DataFileReader<GenericData.Record> dataFileReader = new DataFileReader<>(new File(manifestFile), new GenericDatumReader<>())) { schema = dataFileReader.getSchema(); int recordCount = 0; while (dataFileReader.hasNext()) { entry = dataFileReader.next(); recordCount++; } assertEquals(recordCount, 1); } // Alter data file entry to store incorrect file size GenericData.Record dataFile = (GenericData.Record) entry.get("data_file"); long alteredValue = 50L; assertNotEquals((long) dataFile.get("file_size_in_bytes"), alteredValue); dataFile.put("file_size_in_bytes", alteredValue); // Replace the file through HDFS client. This is required for correct checksums. 
HdfsEnvironment.HdfsContext context = new HdfsContext(getSession().toConnectorSession()); Path manifestFilePath = new Path(manifestFile); FileSystem fs = HDFS_ENVIRONMENT.getFileSystem(context, manifestFilePath); // Write altered metadata try (OutputStream out = fs.create(manifestFilePath); DataFileWriter<GenericData.Record> dataFileWriter = new DataFileWriter<>(new GenericDatumWriter<>(schema))) { dataFileWriter.create(schema, out); dataFileWriter.append(entry); } // Ignoring Iceberg provided file size makes the query succeed Session session = Session.builder(getSession()) .setCatalogSessionProperty("iceberg", "use_file_size_from_metadata", "false") .build(); assertQuery(session, "SELECT * FROM test_iceberg_file_size", "VALUES (123), (456), (758)"); // Using Iceberg provided file size fails the query assertQueryFails("SELECT * FROM test_iceberg_file_size", format("Error reading tail from .* with length %d", alteredValue)); dropTable("test_iceberg_file_size"); } @Override protected TestTable createTableWithDefaultColumns() { throw new SkipException("Iceberg connector does not support column default values"); } @Override protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup) { String typeName = dataMappingTestSetup.getTrinoTypeName(); if (typeName.equals("tinyint") || typeName.equals("smallint") || typeName.startsWith("char(")) { // These types are not supported by Iceberg return Optional.of(dataMappingTestSetup.asUnsupported()); } // According to Iceberg specification all time and timestamp values are stored with microsecond precision. 
if (typeName.equals("time")) { return Optional.of(new DataMappingTestSetup("time(6)", "TIME '15:03:00'", "TIME '23:59:59.999999'")); } if (typeName.equals("timestamp")) { return Optional.of(new DataMappingTestSetup("timestamp(6)", "TIMESTAMP '2020-02-12 15:03:00'", "TIMESTAMP '2199-12-31 23:59:59.999999'")); } if (typeName.equals("timestamp(3) with time zone")) { return Optional.of(new DataMappingTestSetup("timestamp(6) with time zone", "TIMESTAMP '2020-02-12 15:03:00 +01:00'", "TIMESTAMP '9999-12-31 23:59:59.999999 +12:00'")); } return Optional.of(dataMappingTestSetup); } @Override protected Optional<DataMappingTestSetup> filterCaseSensitiveDataMappingTestData(DataMappingTestSetup dataMappingTestSetup) { String typeName = dataMappingTestSetup.getTrinoTypeName(); if (typeName.equals("char(1)")) { return Optional.of(dataMappingTestSetup.asUnsupported()); } return Optional.of(dataMappingTestSetup); } }
plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.plugin.iceberg; import com.google.common.base.VerifyException; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.trino.Session; import io.trino.metadata.Metadata; import io.trino.metadata.QualifiedObjectName; import io.trino.metadata.TableHandle; import io.trino.plugin.hive.HdfsEnvironment; import io.trino.spi.connector.ColumnHandle; import io.trino.spi.connector.Constraint; import io.trino.spi.connector.ConstraintApplicationResult; import io.trino.spi.connector.TableNotFoundException; import io.trino.spi.predicate.Domain; import io.trino.spi.predicate.NullableValue; import io.trino.spi.predicate.TupleDomain; import io.trino.spi.statistics.ColumnStatistics; import io.trino.spi.statistics.TableStatistics; import io.trino.testing.BaseConnectorTest; import io.trino.testing.MaterializedResult; import io.trino.testing.MaterializedRow; import io.trino.testing.QueryRunner; import io.trino.testing.TestingConnectorBehavior; import io.trino.testing.sql.TestTable; import io.trino.transaction.TransactionBuilder; import org.apache.avro.Schema; import org.apache.avro.file.DataFileReader; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericDatumWriter; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import 
org.apache.iceberg.FileFormat; import org.intellij.lang.annotations.Language; import org.testng.SkipException; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; import java.io.OutputStream; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.IntStream; import java.util.stream.LongStream; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.google.common.collect.Iterables.getOnlyElement; import static io.trino.plugin.hive.HdfsEnvironment.HdfsContext; import static io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT; import static io.trino.plugin.iceberg.IcebergQueryRunner.createIcebergQueryRunner; import static io.trino.plugin.iceberg.IcebergSplitManager.ICEBERG_DOMAIN_COMPACTION_THRESHOLD; import static io.trino.spi.predicate.Domain.multipleValues; import static io.trino.spi.predicate.Domain.singleValue; import static io.trino.spi.type.BigintType.BIGINT; import static io.trino.spi.type.DoubleType.DOUBLE; import static io.trino.spi.type.VarcharType.VARCHAR; import static io.trino.testing.MaterializedResult.resultBuilder; import static io.trino.testing.assertions.Assert.assertEquals; import static io.trino.transaction.TransactionBuilder.transaction; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.joining; import static java.util.stream.IntStream.range; import static org.apache.iceberg.FileFormat.ORC; import static org.apache.iceberg.FileFormat.PARQUET; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static 
org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertTrue; public abstract class BaseIcebergConnectorTest extends BaseConnectorTest { private static final Pattern WITH_CLAUSE_EXTRACTER = Pattern.compile(".*(WITH\\s*\\([^)]*\\))\\s*$", Pattern.DOTALL); private final FileFormat format; protected BaseIcebergConnectorTest(FileFormat format) { this.format = requireNonNull(format, "format is null"); } @Override protected QueryRunner createQueryRunner() throws Exception { return createIcebergQueryRunner(ImmutableMap.of(), format, REQUIRED_TPCH_TABLES); } @Override protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior) { switch (connectorBehavior) { case SUPPORTS_COMMENT_ON_COLUMN: case SUPPORTS_TOPN_PUSHDOWN: return false; case SUPPORTS_CREATE_VIEW: return true; case SUPPORTS_CREATE_MATERIALIZED_VIEW: return true; case SUPPORTS_DELETE: return true; default: return super.hasBehavior(connectorBehavior); } } @Test @Override public void testDelete() { // Deletes are covered with testMetadataDelete test methods assertThatThrownBy(super::testDelete) .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely"); } @Override public void testDeleteWithComplexPredicate() { // Deletes are covered with testMetadataDelete test methods assertThatThrownBy(super::testDeleteWithComplexPredicate) .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely"); } @Override public void testDeleteWithSemiJoin() { // Deletes are covered with testMetadataDelete test methods assertThatThrownBy(super::testDeleteWithSemiJoin) .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely"); } @Override public void testDeleteWithSubquery() { // Deletes are covered with testMetadataDelete test methods 
assertThatThrownBy(super::testDeleteWithSubquery) .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely"); } @Override public void testDeleteWithVarcharPredicate() { // Deletes are covered with testMetadataDelete test methods assertThatThrownBy(super::testDeleteWithVarcharPredicate) .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely"); } @Override public void testRowLevelDelete() { // Deletes are covered with testMetadataDelete test methods assertThatThrownBy(super::testRowLevelDelete) .hasStackTraceContaining("This connector only supports delete where one or more partitions are deleted entirely"); } @Test @Override public void testShowCreateSchema() { assertThat(computeActual("SHOW CREATE SCHEMA tpch").getOnlyValue().toString()) .matches("CREATE SCHEMA iceberg.tpch\n" + "AUTHORIZATION USER user\n" + "WITH \\(\n" + "\\s+location = '.*/iceberg_data/tpch'\n" + "\\)"); } @Override @Test public void testDescribeTable() { MaterializedResult expectedColumns = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR) .row("orderkey", "bigint", "", "") .row("custkey", "bigint", "", "") .row("orderstatus", "varchar", "", "") .row("totalprice", "double", "", "") .row("orderdate", "date", "", "") .row("orderpriority", "varchar", "", "") .row("clerk", "varchar", "", "") .row("shippriority", "integer", "", "") .row("comment", "varchar", "", "") .build(); MaterializedResult actualColumns = computeActual("DESCRIBE orders"); assertEquals(actualColumns, expectedColumns); } @Override @Test public void testShowCreateTable() { assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) .isEqualTo("CREATE TABLE iceberg.tpch.orders (\n" + " orderkey bigint,\n" + " custkey bigint,\n" + " orderstatus varchar,\n" + " totalprice double,\n" + " orderdate date,\n" + " orderpriority varchar,\n" + " clerk varchar,\n" + " shippriority integer,\n" + " comment 
varchar\n" + ")\n" + "WITH (\n" + " format = '" + format.name() + "'\n" + ")"); } @Override protected void checkInformationSchemaViewsForMaterializedView(String schemaName, String viewName) { // TODO should probably return materialized view, as it's also a view -- to be double checked assertThatThrownBy(() -> super.checkInformationSchemaViewsForMaterializedView(schemaName, viewName)) .hasMessageFindingMatch("(?s)Expecting.*to contain:.*\\Q[(" + viewName + ")]"); } @Test public void testDecimal() { testDecimalWithPrecisionAndScale(1, 0); testDecimalWithPrecisionAndScale(8, 6); testDecimalWithPrecisionAndScale(9, 8); testDecimalWithPrecisionAndScale(10, 8); testDecimalWithPrecisionAndScale(18, 1); testDecimalWithPrecisionAndScale(18, 8); testDecimalWithPrecisionAndScale(18, 17); testDecimalWithPrecisionAndScale(17, 16); testDecimalWithPrecisionAndScale(18, 17); testDecimalWithPrecisionAndScale(24, 10); testDecimalWithPrecisionAndScale(30, 10); testDecimalWithPrecisionAndScale(37, 26); testDecimalWithPrecisionAndScale(38, 37); testDecimalWithPrecisionAndScale(38, 17); testDecimalWithPrecisionAndScale(38, 37); } private void testDecimalWithPrecisionAndScale(int precision, int scale) { checkArgument(precision >= 1 && precision <= 38, "Decimal precision (%s) must be between 1 and 38 inclusive", precision); checkArgument(scale < precision && scale >= 0, "Decimal scale (%s) must be less than the precision (%s) and non-negative", scale, precision); String decimalType = format("DECIMAL(%d,%d)", precision, scale); String beforeTheDecimalPoint = "12345678901234567890123456789012345678".substring(0, precision - scale); String afterTheDecimalPoint = "09876543210987654321098765432109876543".substring(0, scale); String decimalValue = format("%s.%s", beforeTheDecimalPoint, afterTheDecimalPoint); assertUpdate(format("CREATE TABLE test_iceberg_decimal (x %s)", decimalType)); assertUpdate(format("INSERT INTO test_iceberg_decimal (x) VALUES (CAST('%s' AS %s))", decimalValue, 
decimalType), 1); assertQuery("SELECT * FROM test_iceberg_decimal", format("SELECT CAST('%s' AS %s)", decimalValue, decimalType)); dropTable("test_iceberg_decimal"); } @Test public void testTime() { testSelectOrPartitionedByTime(false); } @Test public void testPartitionedByTime() { testSelectOrPartitionedByTime(true); } private void testSelectOrPartitionedByTime(boolean partitioned) { String tableName = format("test_%s_by_time", partitioned ? "partitioned" : "selected"); String partitioning = partitioned ? "WITH(partitioning = ARRAY['x'])" : ""; assertUpdate(format("CREATE TABLE %s (x TIME(6), y BIGINT) %s", tableName, partitioning)); assertUpdate(format("INSERT INTO %s VALUES (TIME '10:12:34', 12345)", tableName), 1); assertQuery(format("SELECT COUNT(*) FROM %s", tableName), "SELECT 1"); assertQuery(format("SELECT x FROM %s", tableName), "SELECT CAST('10:12:34' AS TIME)"); assertUpdate(format("INSERT INTO %s VALUES (TIME '9:00:00', 67890)", tableName), 1); assertQuery(format("SELECT COUNT(*) FROM %s", tableName), "SELECT 2"); assertQuery(format("SELECT x FROM %s WHERE x = TIME '10:12:34'", tableName), "SELECT CAST('10:12:34' AS TIME)"); assertQuery(format("SELECT x FROM %s WHERE x = TIME '9:00:00'", tableName), "SELECT CAST('9:00:00' AS TIME)"); assertQuery(format("SELECT x FROM %s WHERE y = 12345", tableName), "SELECT CAST('10:12:34' AS TIME)"); assertQuery(format("SELECT x FROM %s WHERE y = 67890", tableName), "SELECT CAST('9:00:00' AS TIME)"); dropTable(tableName); } @Test public void testPartitionByTimestamp() { testSelectOrPartitionedByTimestamp(true); } @Test public void testSelectByTimestamp() { testSelectOrPartitionedByTimestamp(false); } private void testSelectOrPartitionedByTimestamp(boolean partitioned) { String tableName = format("test_%s_by_timestamp", partitioned ? "partitioned" : "selected"); assertUpdate(format("CREATE TABLE %s (_timestamp timestamp(6)) %s", tableName, partitioned ? 
"WITH (partitioning = ARRAY['_timestamp'])" : "")); @Language("SQL") String select1 = "SELECT TIMESTAMP '2017-05-01 10:12:34' _timestamp"; @Language("SQL") String select2 = "SELECT TIMESTAMP '2017-10-01 10:12:34' _timestamp"; @Language("SQL") String select3 = "SELECT TIMESTAMP '2018-05-01 10:12:34' _timestamp"; assertUpdate(format("INSERT INTO %s %s", tableName, select1), 1); assertUpdate(format("INSERT INTO %s %s", tableName, select2), 1); assertUpdate(format("INSERT INTO %s %s", tableName, select3), 1); assertQuery(format("SELECT COUNT(*) from %s", tableName), "SELECT 3"); assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2017-05-01 10:12:34'", tableName), select1); assertQuery(format("SELECT * from %s WHERE _timestamp < TIMESTAMP '2017-06-01 10:12:34'", tableName), select1); assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2017-10-01 10:12:34'", tableName), select2); assertQuery(format("SELECT * from %s WHERE _timestamp > TIMESTAMP '2017-06-01 10:12:34' AND _timestamp < TIMESTAMP '2018-05-01 10:12:34'", tableName), select2); assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2018-05-01 10:12:34'", tableName), select3); assertQuery(format("SELECT * from %s WHERE _timestamp > TIMESTAMP '2018-01-01 10:12:34'", tableName), select3); dropTable(tableName); } @Test public void testCreatePartitionedTable() { assertUpdate("" + "CREATE TABLE test_partitioned_table (" + " _string VARCHAR" + ", _bigint BIGINT" + ", _integer INTEGER" + ", _real REAL" + ", _double DOUBLE" + ", _boolean BOOLEAN" + ", _decimal_short DECIMAL(3,2)" + ", _decimal_long DECIMAL(30,10)" + ", _timestamp TIMESTAMP(6)" + ", _date DATE" + ") " + "WITH (" + "partitioning = ARRAY[" + " '_string'," + " '_integer'," + " '_bigint'," + " '_boolean'," + " '_real'," + " '_double'," + " '_decimal_short', " + " '_decimal_long'," + " '_timestamp'," + " '_date']" + ")"); assertQueryReturnsEmptyResult("SELECT * FROM test_partitioned_table"); @Language("SQL") String 
select = "" + "SELECT" + " 'foo' _string" + ", CAST(123 AS BIGINT) _bigint" + ", 456 _integer" + ", CAST('123.45' AS REAL) _real" + ", CAST('3.14' AS DOUBLE) _double" + ", true _boolean" + ", CAST('3.14' AS DECIMAL(3,2)) _decimal_short" + ", CAST('12345678901234567890.0123456789' AS DECIMAL(30,10)) _decimal_long" + ", CAST('2017-05-01 10:12:34' AS TIMESTAMP) _timestamp" + ", CAST('2017-05-01' AS DATE) _date"; assertUpdate(format("INSERT INTO test_partitioned_table %s", select), 1); assertQuery("SELECT * FROM test_partitioned_table", select); assertQuery( "SELECT * FROM test_partitioned_table WHERE" + " 'foo' = _string" + " AND 456 = _integer" + " AND CAST(123 AS BIGINT) = _bigint" + " AND true = _boolean" + " AND CAST('3.14' AS DECIMAL(3,2)) = _decimal_short" + " AND CAST('12345678901234567890.0123456789' AS DECIMAL(30,10)) = _decimal_long" + " AND CAST('2017-05-01 10:12:34' AS TIMESTAMP) = _timestamp" + " AND CAST('2017-05-01' AS DATE) = _date", select); dropTable("test_partitioned_table"); } @Test public void testCreatePartitionedTableWithNestedTypes() { assertUpdate("" + "CREATE TABLE test_partitioned_table_nested_type (" + " _string VARCHAR" + ", _struct ROW(_field1 INT, _field2 VARCHAR)" + ", _date DATE" + ") " + "WITH (" + " partitioning = ARRAY['_date']" + ")"); dropTable("test_partitioned_table_nested_type"); } @Test public void testPartitionedTableWithNullValues() { assertUpdate("CREATE TABLE test_partitioned_table_with_null_values (" + " _string VARCHAR" + ", _bigint BIGINT" + ", _integer INTEGER" + ", _real REAL" + ", _double DOUBLE" + ", _boolean BOOLEAN" + ", _decimal_short DECIMAL(3,2)" + ", _decimal_long DECIMAL(30,10)" + ", _timestamp TIMESTAMP(6)" + ", _date DATE" + ") " + "WITH (" + "partitioning = ARRAY[" + " '_string'," + " '_integer'," + " '_bigint'," + " '_boolean'," + " '_real'," + " '_double'," + " '_decimal_short', " + " '_decimal_long'," + " '_timestamp'," + " '_date']" + ")"); assertQueryReturnsEmptyResult("SELECT * from 
test_partitioned_table_with_null_values"); @Language("SQL") String select = "" + "SELECT" + " null _string" + ", null _bigint" + ", null _integer" + ", null _real" + ", null _double" + ", null _boolean" + ", null _decimal_short" + ", null _decimal_long" + ", null _timestamp" + ", null _date"; assertUpdate("INSERT INTO test_partitioned_table_with_null_values " + select, 1); assertQuery("SELECT * from test_partitioned_table_with_null_values", select); dropTable("test_partitioned_table_with_null_values"); } @Test public void testCreatePartitionedTableAs() { assertUpdate( "CREATE TABLE test_create_partitioned_table_as " + "WITH (" + "partitioning = ARRAY['ORDER_STATUS', 'Ship_Priority', 'Bucket(order_key,9)']" + ") " + "AS " + "SELECT orderkey AS order_key, shippriority AS ship_priority, orderstatus AS order_status " + "FROM tpch.tiny.orders", "SELECT count(*) from orders"); assertEquals( computeScalar("SHOW CREATE TABLE test_create_partitioned_table_as"), format( "CREATE TABLE %s.%s.%s (\n" + " order_key bigint,\n" + " ship_priority integer,\n" + " order_status varchar\n" + ")\n" + "WITH (\n" + " format = '%s',\n" + " partitioning = ARRAY['order_status','ship_priority','bucket(order_key, 9)']\n" + ")", getSession().getCatalog().orElseThrow(), getSession().getSchema().orElseThrow(), "test_create_partitioned_table_as", format)); assertQuery("SELECT * from test_create_partitioned_table_as", "SELECT orderkey, shippriority, orderstatus FROM orders"); dropTable("test_create_partitioned_table_as"); } @Test public void testColumnComments() { // TODO add support for setting comments on existing column and replace the test with io.trino.testing.AbstractTestDistributedQueries#testCommentColumn assertUpdate("CREATE TABLE test_column_comments (_bigint BIGINT COMMENT 'test column comment')"); assertQuery( "SHOW COLUMNS FROM test_column_comments", "VALUES ('_bigint', 'bigint', '', 'test column comment')"); dropTable("test_column_comments"); } @Test public void testTableComments() { 
String createTableTemplate = "" + "CREATE TABLE iceberg.tpch.test_table_comments (\n" + " _x bigint\n" + ")\n" + "COMMENT '%s'\n" + "WITH (\n" + format(" format = '%s'\n", format) + ")"; @Language("SQL") String createTableSql = format(createTableTemplate, "test table comment", format); assertUpdate(createTableSql); MaterializedResult resultOfCreate = computeActual("SHOW CREATE TABLE test_table_comments"); assertEquals(getOnlyElement(resultOfCreate.getOnlyColumnAsSet()), createTableSql); assertUpdate("COMMENT ON TABLE test_table_comments IS 'different test table comment'"); MaterializedResult resultOfCommentChange = computeActual("SHOW CREATE TABLE test_table_comments"); String afterChangeSql = format(createTableTemplate, "different test table comment", format); assertEquals(getOnlyElement(resultOfCommentChange.getOnlyColumnAsSet()), afterChangeSql); dropTable("iceberg.tpch.test_table_comments"); String createTableWithoutComment = "" + "CREATE TABLE iceberg.tpch.test_table_comments (\n" + " _x bigint\n" + ")\n" + "WITH (\n" + " format = 'ORC'\n" + ")"; assertUpdate(format(createTableWithoutComment, format)); assertUpdate("COMMENT ON TABLE test_table_comments IS NULL"); MaterializedResult resultOfRemovingComment = computeActual("SHOW CREATE TABLE test_table_comments"); assertEquals(getOnlyElement(resultOfRemovingComment.getOnlyColumnAsSet()), format(createTableWithoutComment, format)); dropTable("iceberg.tpch.test_table_comments"); } @Test public void testRollbackSnapshot() { assertUpdate("CREATE TABLE test_rollback (col0 INTEGER, col1 BIGINT)"); long afterCreateTableId = getLatestSnapshotId("test_rollback"); assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (123, CAST(987 AS BIGINT))", 1); long afterFirstInsertId = getLatestSnapshotId("test_rollback"); assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (456, CAST(654 AS BIGINT))", 1); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (123, CAST(987 AS BIGINT)), (456, CAST(654 AS 
BIGINT))"); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterFirstInsertId)); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (123, CAST(987 AS BIGINT))"); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterCreateTableId)); assertEquals((long) computeActual("SELECT COUNT(*) FROM test_rollback").getOnlyValue(), 0); dropTable("test_rollback"); } private long getLatestSnapshotId(String tableName) { return (long) computeActual(format("SELECT snapshot_id FROM \"%s$snapshots\" ORDER BY committed_at DESC LIMIT 1", tableName)) .getOnlyValue(); } @Test public void testInsertIntoNotNullColumn() { assertUpdate("CREATE TABLE test_not_null_table (c1 INTEGER, c2 INTEGER NOT NULL)"); assertUpdate("INSERT INTO test_not_null_table (c2) VALUES (2)", 1); assertQuery("SELECT * FROM test_not_null_table", "VALUES (NULL, 2)"); assertQueryFails("INSERT INTO test_not_null_table (c1) VALUES (1)", "NULL value not allowed for NOT NULL column: c2"); dropTable("test_not_null_table"); assertUpdate("CREATE TABLE test_commuted_not_null_table (a BIGINT, b BIGINT NOT NULL)"); assertUpdate("INSERT INTO test_commuted_not_null_table (b) VALUES (2)", 1); assertQuery("SELECT * FROM test_commuted_not_null_table", "VALUES (NULL, 2)"); assertQueryFails("INSERT INTO test_commuted_not_null_table (b, a) VALUES (NULL, 3)", "NULL value not allowed for NOT NULL column: b"); dropTable("test_commuted_not_null_table"); } @Test public void testSchemaEvolution() { assertUpdate("CREATE TABLE test_schema_evolution_drop_end (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); assertUpdate("INSERT INTO test_schema_evolution_drop_end VALUES (0, 1, 2)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_end DROP COLUMN col2"); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1)"); assertUpdate("ALTER TABLE 
test_schema_evolution_drop_end ADD COLUMN col2 INTEGER"); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, NULL)"); assertUpdate("INSERT INTO test_schema_evolution_drop_end VALUES (3, 4, 5)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, NULL), (3, 4, 5)"); dropTable("test_schema_evolution_drop_end"); assertUpdate("CREATE TABLE test_schema_evolution_drop_middle (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); assertUpdate("INSERT INTO test_schema_evolution_drop_middle VALUES (0, 1, 2)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 1, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_middle DROP COLUMN col1"); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_middle ADD COLUMN col1 INTEGER"); assertUpdate("INSERT INTO test_schema_evolution_drop_middle VALUES (3, 4, 5)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 2, NULL), (3, 4, 5)"); dropTable("test_schema_evolution_drop_middle"); } @Test public void testLargeInFailureOnPartitionedColumns() { QualifiedObjectName tableName = new QualifiedObjectName("iceberg", "tpch", "test_large_in_failure"); assertUpdate(format( "CREATE TABLE %s (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col2'])", tableName)); assertUpdate(format("INSERT INTO %s VALUES (1, 10)", tableName), 1L); assertUpdate(format("INSERT INTO %s VALUES (2, 20)", tableName), 1L); List<String> predicates = IntStream.range(0, 5000).boxed() .map(Object::toString) .collect(toImmutableList()); String filter = format("col2 IN (%s)", String.join(",", predicates)); assertThatThrownBy(() -> getQueryRunner().execute(format("SELECT * FROM %s WHERE %s", tableName, filter))) .isInstanceOf(RuntimeException.class) .hasMessage("java.lang.StackOverflowError"); dropTable("test_large_in_failure"); } @Test public void testCreateTableLike() { FileFormat 
otherFormat = format == PARQUET ? ORC : PARQUET; testCreateTableLikeForFormat(otherFormat); } private void testCreateTableLikeForFormat(FileFormat otherFormat) { assertUpdate(format("CREATE TABLE test_create_table_like_original (col1 INTEGER, aDate DATE) WITH(format = '%s', partitioning = ARRAY['aDate'])", format)); assertEquals(getTablePropertiesString("test_create_table_like_original"), "WITH (\n" + format(" format = '%s',\n", format) + " partitioning = ARRAY['adate']\n" + ")"); assertUpdate("CREATE TABLE test_create_table_like_copy0 (LIKE test_create_table_like_original, col2 INTEGER)"); assertUpdate("INSERT INTO test_create_table_like_copy0 (col1, aDate, col2) VALUES (1, CAST('1950-06-28' AS DATE), 3)", 1); assertQuery("SELECT * from test_create_table_like_copy0", "VALUES(1, CAST('1950-06-28' AS DATE), 3)"); dropTable("test_create_table_like_copy0"); assertUpdate("CREATE TABLE test_create_table_like_copy1 (LIKE test_create_table_like_original)"); assertEquals(getTablePropertiesString("test_create_table_like_copy1"), "WITH (\n" + format(" format = '%s'\n)", format)); dropTable("test_create_table_like_copy1"); assertUpdate("CREATE TABLE test_create_table_like_copy2 (LIKE test_create_table_like_original EXCLUDING PROPERTIES)"); assertEquals(getTablePropertiesString("test_create_table_like_copy2"), "WITH (\n" + format(" format = '%s'\n)", format)); dropTable("test_create_table_like_copy2"); assertUpdate("CREATE TABLE test_create_table_like_copy3 (LIKE test_create_table_like_original INCLUDING PROPERTIES)"); assertEquals(getTablePropertiesString("test_create_table_like_copy3"), "WITH (\n" + format(" format = '%s',\n", format) + " partitioning = ARRAY['adate']\n" + ")"); dropTable("test_create_table_like_copy3"); assertUpdate(format("CREATE TABLE test_create_table_like_copy4 (LIKE test_create_table_like_original INCLUDING PROPERTIES) WITH (format = '%s')", otherFormat)); assertEquals(getTablePropertiesString("test_create_table_like_copy4"), "WITH (\n" + format(" 
format = '%s',\n", otherFormat) + " partitioning = ARRAY['adate']\n" + ")"); dropTable("test_create_table_like_copy4"); dropTable("test_create_table_like_original"); } private String getTablePropertiesString(String tableName) { MaterializedResult showCreateTable = computeActual("SHOW CREATE TABLE " + tableName); String createTable = (String) getOnlyElement(showCreateTable.getOnlyColumnAsSet()); Matcher matcher = WITH_CLAUSE_EXTRACTER.matcher(createTable); return matcher.matches() ? matcher.group(1) : null; } @Test public void testPredicating() { assertUpdate("CREATE TABLE test_predicating_on_real (col REAL)"); assertUpdate("INSERT INTO test_predicating_on_real VALUES 1.2", 1); assertQuery("SELECT * FROM test_predicating_on_real WHERE col = 1.2", "VALUES 1.2"); dropTable("test_predicating_on_real"); } @Test public void testHourTransform() { assertUpdate("CREATE TABLE test_hour_transform (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['hour(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1969-12-31 22:22:22.222222', 8)," + "(TIMESTAMP '1969-12-31 23:33:11.456789', 9)," + "(TIMESTAMP '1969-12-31 23:44:55.567890', 10)," + "(TIMESTAMP '1970-01-01 00:55:44.765432', 11)," + "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," + "(TIMESTAMP '2015-01-01 10:10:02.987654', 2)," + "(TIMESTAMP '2015-01-01 10:55:00.456789', 3)," + "(TIMESTAMP '2015-05-15 12:05:01.234567', 4)," + "(TIMESTAMP '2015-05-15 12:21:02.345678', 5)," + "(TIMESTAMP '2020-02-21 13:11:11.876543', 6)," + "(TIMESTAMP '2020-02-21 13:12:12.654321', 7)"; assertUpdate("INSERT INTO test_hour_transform " + values, 11); assertQuery("SELECT * FROM test_hour_transform", values); @Language("SQL") String expected = "VALUES " + "(-2, 1, TIMESTAMP '1969-12-31 22:22:22.222222', TIMESTAMP '1969-12-31 22:22:22.222222', 8, 8), " + "(-1, 2, TIMESTAMP '1969-12-31 23:33:11.456789', TIMESTAMP '1969-12-31 23:44:55.567890', 9, 10), " + "(0, 1, TIMESTAMP '1970-01-01 00:55:44.765432', TIMESTAMP '1970-01-01 
00:55:44.765432', 11, 11), " + "(394474, 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 10:55:00.456789', 1, 3), " + "(397692, 2, TIMESTAMP '2015-05-15 12:05:01.234567', TIMESTAMP '2015-05-15 12:21:02.345678', 4, 5), " + "(439525, 2, TIMESTAMP '2020-02-21 13:11:11.876543', TIMESTAMP '2020-02-21 13:12:12.654321', 6, 7)"; if (format == ORC) { expected = "VALUES " + "(-2, 1, NULL, NULL, 8, 8), " + "(-1, 2, NULL, NULL, 9, 10), " + "(0, 1, NULL, NULL, 11, 11), " + "(394474, 3, NULL, NULL, 1, 3), " + "(397692, 2, NULL, NULL, 4, 5), " + "(439525, 2, NULL, NULL, 6, 7)"; } assertQuery("SELECT d_hour, row_count, d.min, d.max, b.min, b.max FROM \"test_hour_transform$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_hour_transform WHERE day_of_week(d) = 3 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-31 23:44:55.567890', 10)"); assertThat(query("SHOW STATS FOR test_hour_transform")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, " + (format == ORC ? 
"NULL, NULL" : "'1969-12-31 22:22:22.222222', '2020-02-21 13:12:12.654321'") + "), " + " ('b', NULL, 0e0, NULL, '1', '11'), " + " (NULL, NULL, NULL, 11e0, NULL, NULL)"); dropTable("test_hour_transform"); } @Test public void testDayTransformDate() { assertUpdate("CREATE TABLE test_day_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['day(d)'])"); @Language("SQL") String values = "VALUES " + "(DATE '1969-01-01', 10), " + "(DATE '1969-12-31', 11), " + "(DATE '1970-01-01', 1), " + "(DATE '1970-03-04', 2), " + "(DATE '2015-01-01', 3), " + "(DATE '2015-01-13', 4), " + "(DATE '2015-01-13', 5), " + "(DATE '2015-05-15', 6), " + "(DATE '2015-05-15', 7), " + "(DATE '2020-02-21', 8), " + "(DATE '2020-02-21', 9)"; assertUpdate("INSERT INTO test_day_transform_date " + values, 11); assertQuery("SELECT * FROM test_day_transform_date", values); assertQuery( "SELECT d_day, row_count, d.min, d.max, b.min, b.max FROM \"test_day_transform_date$partitions\"", "VALUES " + "(DATE '1969-01-01', 1, DATE '1969-01-01', DATE '1969-01-01', 10, 10), " + "(DATE '1969-12-31', 1, DATE '1969-12-31', DATE '1969-12-31', 11, 11), " + "(DATE '1970-01-01', 1, DATE '1970-01-01', DATE '1970-01-01', 1, 1), " + "(DATE '1970-03-04', 1, DATE '1970-03-04', DATE '1970-03-04', 2, 2), " + "(DATE '2015-01-01', 1, DATE '2015-01-01', DATE '2015-01-01', 3, 3), " + "(DATE '2015-01-13', 2, DATE '2015-01-13', DATE '2015-01-13', 4, 5), " + "(DATE '2015-05-15', 2, DATE '2015-05-15', DATE '2015-05-15', 6, 7), " + "(DATE '2020-02-21', 2, DATE '2020-02-21', DATE '2020-02-21', 8, 9)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_day_transform_date WHERE day_of_week(d) = 3 AND b % 7 = 3", "VALUES (DATE '1969-01-01', 10)"); assertThat(query("SHOW STATS FOR test_day_transform_date")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + 
" ('d', NULL, 0e0, NULL, '1969-01-01', '2020-02-21'), " + " ('b', NULL, 0e0, NULL, '1', '11'), " + " (NULL, NULL, NULL, 11e0, NULL, NULL)"); dropTable("test_day_transform_date"); } @Test public void testDayTransformTimestamp() { assertUpdate("CREATE TABLE test_day_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['day(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1969-12-25 15:13:12.876543', 8)," + "(TIMESTAMP '1969-12-30 18:47:33.345678', 9)," + "(TIMESTAMP '1969-12-31 00:00:00.000000', 10)," + "(TIMESTAMP '1969-12-31 05:06:07.234567', 11)," + "(TIMESTAMP '1970-01-01 12:03:08.456789', 12)," + "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," + "(TIMESTAMP '2015-01-01 11:10:02.987654', 2)," + "(TIMESTAMP '2015-01-01 12:55:00.456789', 3)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 4)," + "(TIMESTAMP '2015-05-15 14:21:02.345678', 5)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 6)," + "(TIMESTAMP '2020-02-21 16:12:12.654321', 7)"; assertUpdate("INSERT INTO test_day_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_day_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(DATE '1969-12-25', 1, TIMESTAMP '1969-12-25 15:13:12.876543', TIMESTAMP '1969-12-25 15:13:12.876543', 8, 8), " + "(DATE '1969-12-30', 1, TIMESTAMP '1969-12-30 18:47:33.345678', TIMESTAMP '1969-12-30 18:47:33.345678', 9, 9), " + "(DATE '1969-12-31', 2, TIMESTAMP '1969-12-31 00:00:00.000000', TIMESTAMP '1969-12-31 05:06:07.234567', 10, 11), " + "(DATE '1970-01-01', 1, TIMESTAMP '1970-01-01 12:03:08.456789', TIMESTAMP '1970-01-01 12:03:08.456789', 12, 12), " + "(DATE '2015-01-01', 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 12:55:00.456789', 1, 3), " + "(DATE '2015-05-15', 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-05-15 14:21:02.345678', 4, 5), " + "(DATE '2020-02-21', 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-02-21 16:12:12.654321', 6, 7)"; if (format == ORC) { 
// Parquet has min/max for timestamps but ORC does not. expected = "VALUES " + "(DATE '1969-12-25', 1, NULL, NULL, 8, 8), " + "(DATE '1969-12-30', 1, NULL, NULL, 9, 9), " + "(DATE '1969-12-31', 2, NULL, NULL, 10, 11), " + "(DATE '1970-01-01', 1, NULL, NULL, 12, 12), " + "(DATE '2015-01-01', 3, NULL, NULL, 1, 3), " + "(DATE '2015-05-15', 2, NULL, NULL, 4, 5), " + "(DATE '2020-02-21', 2, NULL, NULL, 6, 7)"; } assertQuery("SELECT d_day, row_count, d.min, d.max, b.min, b.max FROM \"test_day_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_day_transform_timestamp WHERE day_of_week(d) = 3 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-31 00:00:00.000000', 10)"); assertThat(query("SHOW STATS FOR test_day_transform_timestamp")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, " + (format == ORC ? 
"NULL, NULL" : "'1969-12-25 15:13:12.876543', '2020-02-21 16:12:12.654321'") + "), " + " ('b', NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_day_transform_timestamp"); } @Test public void testMonthTransformDate() { assertUpdate("CREATE TABLE test_month_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['month(d)'])"); @Language("SQL") String values = "VALUES " + "(DATE '1969-11-13', 1)," + "(DATE '1969-12-01', 2)," + "(DATE '1969-12-02', 3)," + "(DATE '1969-12-31', 4)," + "(DATE '1970-01-01', 5), " + "(DATE '1970-05-13', 6), " + "(DATE '1970-12-31', 7), " + "(DATE '2020-01-01', 8), " + "(DATE '2020-06-16', 9), " + "(DATE '2020-06-28', 10), " + "(DATE '2020-06-06', 11), " + "(DATE '2020-07-18', 12), " + "(DATE '2020-07-28', 13), " + "(DATE '2020-12-31', 14)"; assertUpdate("INSERT INTO test_month_transform_date " + values, 14); assertQuery("SELECT * FROM test_month_transform_date", values); assertQuery( "SELECT d_month, row_count, d.min, d.max, b.min, b.max FROM \"test_month_transform_date$partitions\"", "VALUES " + "(-2, 1, DATE '1969-11-13', DATE '1969-11-13', 1, 1), " + "(-1, 3, DATE '1969-12-01', DATE '1969-12-31', 2, 4), " + "(0, 1, DATE '1970-01-01', DATE '1970-01-01', 5, 5), " + "(4, 1, DATE '1970-05-13', DATE '1970-05-13', 6, 6), " + "(11, 1, DATE '1970-12-31', DATE '1970-12-31', 7, 7), " + "(600, 1, DATE '2020-01-01', DATE '2020-01-01', 8, 8), " + "(605, 3, DATE '2020-06-06', DATE '2020-06-28', 9, 11), " + "(606, 2, DATE '2020-07-18', DATE '2020-07-28', 12, 13), " + "(611, 1, DATE '2020-12-31', DATE '2020-12-31', 14, 14)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_month_transform_date WHERE day_of_week(d) = 7 AND b % 7 = 3", "VALUES (DATE '2020-06-28', 10)"); assertThat(query("SHOW STATS FOR test_month_transform_date")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but 
not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, '1969-11-13', '2020-12-31'), " + " ('b', NULL, 0e0, NULL, '1', '14'), " + " (NULL, NULL, NULL, 14e0, NULL, NULL)"); dropTable("test_month_transform_date"); } @Test public void testMonthTransformTimestamp() { assertUpdate("CREATE TABLE test_month_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['month(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1969-11-15 15:13:12.876543', 8)," + "(TIMESTAMP '1969-11-19 18:47:33.345678', 9)," + "(TIMESTAMP '1969-12-01 00:00:00.000000', 10)," + "(TIMESTAMP '1969-12-01 05:06:07.234567', 11)," + "(TIMESTAMP '1970-01-01 12:03:08.456789', 12)," + "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," + "(TIMESTAMP '2015-01-01 11:10:02.987654', 2)," + "(TIMESTAMP '2015-01-01 12:55:00.456789', 3)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 4)," + "(TIMESTAMP '2015-05-15 14:21:02.345678', 5)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 6)," + "(TIMESTAMP '2020-02-21 16:12:12.654321', 7)"; assertUpdate("INSERT INTO test_month_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_month_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(-2, 2, TIMESTAMP '1969-11-15 15:13:12.876543', TIMESTAMP '1969-11-19 18:47:33.345678', 8, 9), " + "(-1, 2, TIMESTAMP '1969-12-01 00:00:00.000000', TIMESTAMP '1969-12-01 05:06:07.234567', 10, 11), " + "(0, 1, TIMESTAMP '1970-01-01 12:03:08.456789', TIMESTAMP '1970-01-01 12:03:08.456789', 12, 12), " + "(540, 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 12:55:00.456789', 1, 3), " + "(544, 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-05-15 14:21:02.345678', 4, 5), " + "(601, 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-02-21 16:12:12.654321', 6, 7)"; if (format == ORC) { expected = "VALUES " + "(-2, 2, NULL, NULL, 8, 9), " + "(-1, 2, NULL, NULL, 10, 11), " + "(0, 1, NULL, NULL, 12, 12), " + "(540, 3, 
NULL, NULL, 1, 3), " + "(544, 2, NULL, NULL, 4, 5), " + "(601, 2, NULL, NULL, 6, 7)"; } assertQuery("SELECT d_month, row_count, d.min, d.max, b.min, b.max FROM \"test_month_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_month_transform_timestamp WHERE day_of_week(d) = 1 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-01 00:00:00.000000', 10)"); assertThat(query("SHOW STATS FOR test_month_transform_timestamp")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, " + (format == ORC ? "NULL, NULL" : "'1969-11-15 15:13:12.876543', '2020-02-21 16:12:12.654321'") + "), " + " ('b', NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_month_transform_timestamp"); } @Test public void testYearTransformDate() { assertUpdate("CREATE TABLE test_year_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['year(d)'])"); @Language("SQL") String values = "VALUES " + "(DATE '1968-10-13', 1), " + "(DATE '1969-01-01', 2), " + "(DATE '1969-03-15', 3), " + "(DATE '1970-01-01', 4), " + "(DATE '1970-03-05', 5), " + "(DATE '2015-01-01', 6), " + "(DATE '2015-06-16', 7), " + "(DATE '2015-07-28', 8), " + "(DATE '2016-05-15', 9), " + "(DATE '2016-06-06', 10), " + "(DATE '2020-02-21', 11), " + "(DATE '2020-11-10', 12)"; assertUpdate("INSERT INTO test_year_transform_date " + values, 12); assertQuery("SELECT * FROM test_year_transform_date", values); assertQuery( "SELECT d_year, row_count, d.min, d.max, b.min, b.max FROM \"test_year_transform_date$partitions\"", "VALUES " + "(-2, 1, DATE '1968-10-13', DATE '1968-10-13', 1, 1), " + "(-1, 2, DATE '1969-01-01', DATE '1969-03-15', 2, 3), " + "(0, 2, DATE '1970-01-01', DATE '1970-03-05', 4, 5), " + "(45, 3, DATE '2015-01-01', DATE '2015-07-28', 6, 8), " + "(46, 
2, DATE '2016-05-15', DATE '2016-06-06', 9, 10), " + "(50, 2, DATE '2020-02-21', DATE '2020-11-10', 11, 12)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_year_transform_date WHERE day_of_week(d) = 1 AND b % 7 = 3", "VALUES (DATE '2016-06-06', 10)"); assertThat(query("SHOW STATS FOR test_year_transform_date")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, '1968-10-13', '2020-11-10'), " + " ('b', NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_year_transform_date"); } @Test public void testYearTransformTimestamp() { assertUpdate("CREATE TABLE test_year_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['year(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1968-03-15 15:13:12.876543', 1)," + "(TIMESTAMP '1968-11-19 18:47:33.345678', 2)," + "(TIMESTAMP '1969-01-01 00:00:00.000000', 3)," + "(TIMESTAMP '1969-01-01 05:06:07.234567', 4)," + "(TIMESTAMP '1970-01-18 12:03:08.456789', 5)," + "(TIMESTAMP '1970-03-14 10:01:23.123456', 6)," + "(TIMESTAMP '1970-08-19 11:10:02.987654', 7)," + "(TIMESTAMP '1970-12-31 12:55:00.456789', 8)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 9)," + "(TIMESTAMP '2015-09-15 14:21:02.345678', 10)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 11)," + "(TIMESTAMP '2020-08-21 16:12:12.654321', 12)"; assertUpdate("INSERT INTO test_year_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_year_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(-2, 2, TIMESTAMP '1968-03-15 15:13:12.876543', TIMESTAMP '1968-11-19 18:47:33.345678', 1, 2), " + "(-1, 2, TIMESTAMP '1969-01-01 00:00:00.000000', TIMESTAMP '1969-01-01 05:06:07.234567', 3, 4), " + "(0, 4, TIMESTAMP '1970-01-18 12:03:08.456789', TIMESTAMP '1970-12-31 
12:55:00.456789', 5, 8), " + "(45, 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-09-15 14:21:02.345678', 9, 10), " + "(50, 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-08-21 16:12:12.654321', 11, 12)"; if (format == ORC) { expected = "VALUES " + "(-2, 2, NULL, NULL, 1, 2), " + "(-1, 2, NULL, NULL, 3, 4), " + "(0, 4, NULL, NULL, 5, 8), " + "(45, 2, NULL, NULL, 9, 10), " + "(50, 2, NULL, NULL, 11, 12)"; } assertQuery("SELECT d_year, row_count, d.min, d.max, b.min, b.max FROM \"test_year_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_year_transform_timestamp WHERE day_of_week(d) = 2 AND b % 7 = 3", "VALUES (TIMESTAMP '2015-09-15 14:21:02.345678', 10)"); assertThat(query("SHOW STATS FOR test_year_transform_timestamp")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, " + (format == ORC ? 
"NULL, NULL" : "'1968-03-15 15:13:12.876543', '2020-08-21 16:12:12.654321'") + "), " + " ('b', NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_year_transform_timestamp"); } @Test public void testTruncateTextTransform() { assertUpdate("CREATE TABLE test_truncate_text_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['truncate(d, 2)'])"); String select = "SELECT d_trunc, row_count, d.min AS d_min, d.max AS d_max, b.min AS b_min, b.max AS b_max FROM \"test_truncate_text_transform$partitions\""; assertUpdate("INSERT INTO test_truncate_text_transform VALUES" + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + "('moscow', 5)," + "('Greece', 6)," + "('Grozny', 7)", 7); assertQuery("SELECT d_trunc FROM \"test_truncate_text_transform$partitions\"", "VALUES 'ab', 'mo', 'Gr'"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'ab'", "VALUES 1, 2, 3"); assertQuery(select + " WHERE d_trunc = 'ab'", "VALUES ('ab', 3, 'ab598', 'abxy', 1, 3)"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'mo'", "VALUES 4, 5"); assertQuery(select + " WHERE d_trunc = 'mo'", "VALUES ('mo', 2, 'mommy', 'moscow', 4, 5)"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'Gr'", "VALUES 6, 7"); assertQuery(select + " WHERE d_trunc = 'Gr'", "VALUES ('Gr', 2, 'Greece', 'Grozny', 6, 7)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_truncate_text_transform WHERE length(d) = 4 AND b % 7 = 2", "VALUES ('abxy', 2)"); assertThat(query("SHOW STATS FOR test_truncate_text_transform")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, NULL, NULL), " + " ('b', NULL, 0e0, NULL, '1', '7'), " + " (NULL, NULL, NULL, 7e0, NULL, NULL)"); 
dropTable("test_truncate_text_transform"); } @Test(dataProvider = "truncateNumberTypesProvider") public void testTruncateIntegerTransform(String dataType) { String table = format("test_truncate_%s_transform", dataType); assertUpdate(format("CREATE TABLE " + table + " (d %s, b BIGINT) WITH (partitioning = ARRAY['truncate(d, 10)'])", dataType)); String select = "SELECT d_trunc, row_count, d.min AS d_min, d.max AS d_max, b.min AS b_min, b.max AS b_max FROM \"" + table + "$partitions\""; assertUpdate("INSERT INTO " + table + " VALUES" + "(0, 1)," + "(1, 2)," + "(5, 3)," + "(9, 4)," + "(10, 5)," + "(11, 6)," + "(120, 7)," + "(121, 8)," + "(123, 9)," + "(-1, 10)," + "(-5, 11)," + "(-10, 12)," + "(-11, 13)," + "(-123, 14)," + "(-130, 15)", 15); assertQuery("SELECT d_trunc FROM \"" + table + "$partitions\"", "VALUES 0, 10, 120, -10, -20, -130"); assertQuery("SELECT b FROM " + table + " WHERE d IN (0, 1, 5, 9)", "VALUES 1, 2, 3, 4"); assertQuery(select + " WHERE d_trunc = 0", "VALUES (0, 4, 0, 9, 1, 4)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (10, 11)", "VALUES 5, 6"); assertQuery(select + " WHERE d_trunc = 10", "VALUES (10, 2, 10, 11, 5, 6)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (120, 121, 123)", "VALUES 7, 8, 9"); assertQuery(select + " WHERE d_trunc = 120", "VALUES (120, 3, 120, 123, 7, 9)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (-1, -5, -10)", "VALUES 10, 11, 12"); assertQuery(select + " WHERE d_trunc = -10", "VALUES (-10, 3, -10, -1, 10, 12)"); assertQuery("SELECT b FROM " + table + " WHERE d = -11", "VALUES 13"); assertQuery(select + " WHERE d_trunc = -20", "VALUES (-20, 1, -11, -11, 13, 13)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (-123, -130)", "VALUES 14, 15"); assertQuery(select + " WHERE d_trunc = -130", "VALUES (-130, 2, -130, -123, 14, 15)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM " + table + " WHERE d % 10 
= -1 AND b % 7 = 3", "VALUES (-1, 10)"); assertThat(query("SHOW STATS FOR " + table)) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, '-130', '123'), " + " ('b', NULL, 0e0, NULL, '1', '15'), " + " (NULL, NULL, NULL, 15e0, NULL, NULL)"); dropTable(table); } @DataProvider public Object[][] truncateNumberTypesProvider() { return new Object[][] { {"integer"}, {"bigint"}, }; } @Test public void testTruncateDecimalTransform() { assertUpdate("CREATE TABLE test_truncate_decimal_transform (d DECIMAL(9, 2), b BIGINT) WITH (partitioning = ARRAY['truncate(d, 10)'])"); String select = "SELECT d_trunc, row_count, d.min AS d_min, d.max AS d_max, b.min AS b_min, b.max AS b_max FROM \"test_truncate_decimal_transform$partitions\""; assertUpdate("INSERT INTO test_truncate_decimal_transform VALUES" + "(12.34, 1)," + "(12.30, 2)," + "(12.29, 3)," + "(0.05, 4)," + "(-0.05, 5)", 5); assertQuery("SELECT d_trunc FROM \"test_truncate_decimal_transform$partitions\"", "VALUES 12.30, 12.20, 0.00, -0.10"); assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d IN (12.34, 12.30)", "VALUES 1, 2"); assertQuery(select + " WHERE d_trunc = 12.30", "VALUES (12.30, 2, 12.30, 12.34, 1, 2)"); assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = 12.29", "VALUES 3"); assertQuery(select + " WHERE d_trunc = 12.20", "VALUES (12.20, 1, 12.29, 12.29, 3, 3)"); assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = 0.05", "VALUES 4"); assertQuery(select + " WHERE d_trunc = 0.00", "VALUES (0.00, 1, 0.05, 0.05, 4, 4)"); assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = -0.05", "VALUES 5"); assertQuery(select + " WHERE d_trunc = -0.10", "VALUES (-0.10, 1, -0.05, -0.05, 5, 5)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM 
test_truncate_decimal_transform WHERE d * 100 % 10 = 9 AND b % 7 = 3", "VALUES (12.29, 3)"); assertThat(query("SHOW STATS FOR test_truncate_decimal_transform")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, '-0.05', '12.34'), " + " ('b', NULL, 0e0, NULL, '1', '5'), " + " (NULL, NULL, NULL, 5e0, NULL, NULL)"); dropTable("test_truncate_decimal_transform"); } @Test public void testBucketTransform() { String select = "SELECT d_bucket, row_count, d.min AS d_min, d.max AS d_max, b.min AS b_min, b.max AS b_max FROM \"test_bucket_transform$partitions\""; assertUpdate("CREATE TABLE test_bucket_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['bucket(d, 2)'])"); assertUpdate( "INSERT INTO test_bucket_transform VALUES" + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + "('moscow', 5)," + "('Greece', 6)," + "('Grozny', 7)", 7); assertQuery("SELECT COUNT(*) FROM \"test_bucket_transform$partitions\"", "SELECT 2"); assertQuery(select + " WHERE d_bucket = 0", "VALUES(0, 3, 'Grozny', 'mommy', 1, 7)"); assertQuery(select + " WHERE d_bucket = 1", "VALUES(1, 4, 'Greece', 'moscow', 2, 6)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_bucket_transform WHERE length(d) = 4 AND b % 7 = 2", "VALUES ('abxy', 2)"); assertThat(query("SHOW STATS FOR test_bucket_transform")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, 0e0, NULL, NULL, NULL), " + " ('b', NULL, 0e0, NULL, '1', '7'), " + " (NULL, NULL, NULL, 7e0, NULL, NULL)"); dropTable("test_bucket_transform"); } @Test public void testMetadataDeleteSimple() { assertUpdate("CREATE TABLE test_metadata_delete_simple (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col1'])"); 
assertUpdate("INSERT INTO test_metadata_delete_simple VALUES(1, 100), (1, 101), (1, 102), (2, 200), (2, 201), (3, 300)", 6); assertQueryFails( "DELETE FROM test_metadata_delete_simple WHERE col1 = 1 AND col2 > 101", "This connector only supports delete where one or more partitions are deleted entirely"); assertQuery("SELECT sum(col2) FROM test_metadata_delete_simple", "SELECT 1004"); assertQuery("SELECT count(*) FROM \"test_metadata_delete_simple$partitions\"", "SELECT 3"); assertUpdate("DELETE FROM test_metadata_delete_simple WHERE col1 = 1"); assertQuery("SELECT sum(col2) FROM test_metadata_delete_simple", "SELECT 701"); assertQuery("SELECT count(*) FROM \"test_metadata_delete_simple$partitions\"", "SELECT 2"); dropTable("test_metadata_delete_simple"); } @Test public void testMetadataDelete() { assertUpdate("CREATE TABLE test_metadata_delete (" + " orderkey BIGINT," + " linenumber INTEGER," + " linestatus VARCHAR" + ") " + "WITH (" + " partitioning = ARRAY[ 'linenumber', 'linestatus' ]" + ")"); assertUpdate( "" + "INSERT INTO test_metadata_delete " + "SELECT orderkey, linenumber, linestatus " + "FROM tpch.tiny.lineitem", "SELECT count(*) FROM lineitem"); assertQuery("SELECT COUNT(*) FROM \"test_metadata_delete$partitions\"", "SELECT 14"); assertUpdate("DELETE FROM test_metadata_delete WHERE linestatus = 'F' AND linenumber = 3"); assertQuery("SELECT * FROM test_metadata_delete", "SELECT orderkey, linenumber, linestatus FROM lineitem WHERE linestatus <> 'F' or linenumber <> 3"); assertQuery("SELECT count(*) FROM \"test_metadata_delete$partitions\"", "SELECT 13"); assertUpdate("DELETE FROM test_metadata_delete WHERE linestatus='O'"); assertQuery("SELECT count(*) FROM \"test_metadata_delete$partitions\"", "SELECT 6"); assertQuery("SELECT * FROM test_metadata_delete", "SELECT orderkey, linenumber, linestatus FROM lineitem WHERE linestatus <> 'O' AND linenumber <> 3"); assertQueryFails("DELETE FROM test_metadata_delete WHERE orderkey=1", "This connector only supports 
delete where one or more partitions are deleted entirely"); dropTable("test_metadata_delete"); } @Test public void testInSet() { testInSet(31); testInSet(35); } private void testInSet(int inCount) { String values = range(1, inCount + 1) .mapToObj(n -> format("(%s, %s)", n, n + 10)) .collect(joining(", ")); String inList = range(1, inCount + 1) .mapToObj(Integer::toString) .collect(joining(", ")); assertUpdate("CREATE TABLE test_in_set (col1 INTEGER, col2 BIGINT)"); assertUpdate(format("INSERT INTO test_in_set VALUES %s", values), inCount); // This proves that SELECTs with large IN phrases work correctly computeActual(format("SELECT col1 FROM test_in_set WHERE col1 IN (%s)", inList)); dropTable("test_in_set"); } @Test public void testBasicTableStatistics() { String tableName = "test_basic_table_statistics"; assertUpdate(format("CREATE TABLE %s (col REAL)", tableName)); String insertStart = format("INSERT INTO %s", tableName); assertUpdate(insertStart + " VALUES -10", 1); assertUpdate(insertStart + " VALUES 100", 1); // SHOW STATS returns rows of the form: column_name, data_size, distinct_values_count, nulls_fractions, row_count, low_value, high_value MaterializedResult result = computeActual("SHOW STATS FOR " + tableName); MaterializedResult expectedStatistics = resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR) .row("col", null, null, 0.0, null, "-10.0", "100.0") .row(null, null, null, null, 2.0, null, null) .build(); assertEquals(result, expectedStatistics); assertUpdate(insertStart + " VALUES 200", 1); result = computeActual("SHOW STATS FOR " + tableName); expectedStatistics = resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR) .row("col", null, null, 0.0, null, "-10.0", "200.0") .row(null, null, null, null, 3.0, null, null) .build(); assertEquals(result, expectedStatistics); dropTable(tableName); } @Test public void testMultipleColumnTableStatistics() { String tableName = 
"test_multiple_table_statistics"; assertUpdate(format("CREATE TABLE %s (col1 REAL, col2 INTEGER, col3 DATE)", tableName)); String insertStart = format("INSERT INTO %s", tableName); assertUpdate(insertStart + " VALUES (-10, -1, DATE '2019-06-28')", 1); assertUpdate(insertStart + " VALUES (100, 10, DATE '2020-01-01')", 1); MaterializedResult result = computeActual("SHOW STATS FOR " + tableName); MaterializedResult expectedStatistics = resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR) .row("col1", null, null, 0.0, null, "-10.0", "100.0") .row("col2", null, null, 0.0, null, "-1", "10") .row("col3", null, null, 0.0, null, "2019-06-28", "2020-01-01") .row(null, null, null, null, 2.0, null, null) .build(); assertEquals(result, expectedStatistics); assertUpdate(insertStart + " VALUES (200, 20, DATE '2020-06-28')", 1); result = computeActual("SHOW STATS FOR " + tableName); expectedStatistics = resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR) .row("col1", null, null, 0.0, null, "-10.0", "200.0") .row("col2", null, null, 0.0, null, "-1", "20") .row("col3", null, null, 0.0, null, "2019-06-28", "2020-06-28") .row(null, null, null, null, 3.0, null, null) .build(); assertEquals(result, expectedStatistics); assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(21, 25) .mapToObj(i -> format("(200, %d, DATE '2020-07-%d')", i, i)) .collect(joining(", ")), 5); assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(26, 30) .mapToObj(i -> format("(NULL, %d, DATE '2020-06-%d')", i, i)) .collect(joining(", ")), 5); result = computeActual("SHOW STATS FOR " + tableName); expectedStatistics = resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR) .row("col1", null, null, 5.0 / 13.0, null, "-10.0", "200.0") .row("col2", null, null, 0.0, null, "-1", "30") .row("col3", null, null, 0.0, null, "2019-06-28", "2020-07-25") .row(null, null, null, null, 13.0, null, null) .build(); 
assertEquals(result, expectedStatistics); dropTable(tableName); } @Test public void testPartitionedTableStatistics() { assertUpdate("CREATE TABLE iceberg.tpch.test_partitioned_table_statistics (col1 REAL, col2 BIGINT) WITH (partitioning = ARRAY['col2'])"); String insertStart = "INSERT INTO test_partitioned_table_statistics"; assertUpdate(insertStart + " VALUES (-10, -1)", 1); assertUpdate(insertStart + " VALUES (100, 10)", 1); MaterializedResult result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics"); assertEquals(result.getRowCount(), 3); MaterializedRow row0 = result.getMaterializedRows().get(0); assertEquals(row0.getField(0), "col1"); assertEquals(row0.getField(3), 0.0); assertEquals(row0.getField(5), "-10.0"); assertEquals(row0.getField(6), "100.0"); MaterializedRow row1 = result.getMaterializedRows().get(1); assertEquals(row1.getField(0), "col2"); assertEquals(row1.getField(3), 0.0); assertEquals(row1.getField(5), "-1"); assertEquals(row1.getField(6), "10"); MaterializedRow row2 = result.getMaterializedRows().get(2); assertEquals(row2.getField(4), 2.0); assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(1, 5) .mapToObj(i -> format("(%d, 10)", i + 100)) .collect(joining(", ")), 5); assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(6, 10) .mapToObj(i -> "(NULL, 10)") .collect(joining(", ")), 5); result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics"); assertEquals(result.getRowCount(), 3); row0 = result.getMaterializedRows().get(0); assertEquals(row0.getField(0), "col1"); assertEquals(row0.getField(3), 5.0 / 12.0); assertEquals(row0.getField(5), "-10.0"); assertEquals(row0.getField(6), "105.0"); row1 = result.getMaterializedRows().get(1); assertEquals(row1.getField(0), "col2"); assertEquals(row1.getField(3), 0.0); assertEquals(row1.getField(5), "-1"); assertEquals(row1.getField(6), "10"); row2 = result.getMaterializedRows().get(2); assertEquals(row2.getField(4), 12.0); 
assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(6, 10) .mapToObj(i -> "(100, NULL)") .collect(joining(", ")), 5); result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics"); row0 = result.getMaterializedRows().get(0); assertEquals(row0.getField(0), "col1"); assertEquals(row0.getField(3), 5.0 / 17.0); assertEquals(row0.getField(5), "-10.0"); assertEquals(row0.getField(6), "105.0"); row1 = result.getMaterializedRows().get(1); assertEquals(row1.getField(0), "col2"); assertEquals(row1.getField(3), 5.0 / 17.0); assertEquals(row1.getField(5), "-1"); assertEquals(row1.getField(6), "10"); row2 = result.getMaterializedRows().get(2); assertEquals(row2.getField(4), 17.0); dropTable("iceberg.tpch.test_partitioned_table_statistics"); } @Test public void testStatisticsConstraints() { String tableName = "iceberg.tpch.test_simple_partitioned_table_statistics"; assertUpdate("CREATE TABLE iceberg.tpch.test_simple_partitioned_table_statistics (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col1'])"); String insertStart = "INSERT INTO iceberg.tpch.test_simple_partitioned_table_statistics"; assertUpdate(insertStart + " VALUES (1, 101), (2, 102), (3, 103), (4, 104)", 4); TableStatistics tableStatistics = getTableStatistics(tableName, new Constraint(TupleDomain.all())); IcebergColumnHandle col1Handle = getColumnHandleFromStatistics(tableStatistics, "col1"); // Constraint.predicate is currently not supported, because it's never provided by the engine. // TODO add (restore) test coverage when this changes. 
// predicate on a partition column assertThatThrownBy(() -> getTableStatistics(tableName, new Constraint( TupleDomain.all(), Optional.of(new TestRelationalNumberPredicate("col1", 3, i1 -> i1 >= 0)), Optional.of(ImmutableSet.of(col1Handle))))) .isInstanceOf(VerifyException.class) .hasMessage("Unexpected Constraint predicate"); // predicate on an unspecified set of columns column assertThatThrownBy(() -> getTableStatistics(tableName, new Constraint( TupleDomain.all(), Optional.of(new TestRelationalNumberPredicate("col2", 102, i -> i >= 0)), Optional.empty()))) .isInstanceOf(VerifyException.class) .hasMessage("Unexpected Constraint predicate"); dropTable(tableName); } @Test public void testPredicatePushdown() { QualifiedObjectName tableName = new QualifiedObjectName("iceberg", "tpch", "test_predicate"); assertUpdate(format("CREATE TABLE %s (col1 BIGINT, col2 BIGINT, col3 BIGINT) WITH (partitioning = ARRAY['col2', 'col3'])", tableName)); assertUpdate(format("INSERT INTO %s VALUES (1, 10, 100)", tableName), 1L); assertUpdate(format("INSERT INTO %s VALUES (2, 20, 200)", tableName), 1L); assertQuery(format("SELECT * FROM %s WHERE col1 = 1", tableName), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col1", singleValue(BIGINT, 1L)), ImmutableMap.of(), ImmutableMap.of("col1", singleValue(BIGINT, 1L))); assertQuery(format("SELECT * FROM %s WHERE col2 = 10", tableName), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of()); assertQuery(format("SELECT * FROM %s WHERE col1 = 1 AND col2 = 10", tableName), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col1", singleValue(BIGINT, 1L), "col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col1", singleValue(BIGINT, 1L))); // Assert pushdown for an IN predicate with value count above the default compaction 
threshold List<Long> values = LongStream.range(1L, 1010L).boxed() .filter(index -> index != 20L) .collect(toImmutableList()); assertTrue(values.size() > ICEBERG_DOMAIN_COMPACTION_THRESHOLD); String valuesString = String.join(",", values.stream().map(Object::toString).collect(toImmutableList())); String inPredicate = "%s IN (" + valuesString + ")"; assertQuery( format("SELECT * FROM %s WHERE %s AND %s", tableName, format(inPredicate, "col1"), format(inPredicate, "col2")), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col1", multipleValues(BIGINT, values), "col2", multipleValues(BIGINT, values)), ImmutableMap.of("col2", multipleValues(BIGINT, values)), // Unenforced predicate is simplified during split generation, but not reflected here ImmutableMap.of("col1", multipleValues(BIGINT, values))); dropTable(tableName.getObjectName()); } private void assertFilterPushdown( QualifiedObjectName tableName, Map<String, Domain> filter, Map<String, Domain> expectedEnforcedPredicate, Map<String, Domain> expectedUnenforcedPredicate) { Metadata metadata = getQueryRunner().getMetadata(); newTransaction().execute(getSession(), session -> { TableHandle table = metadata.getTableHandle(session, tableName) .orElseThrow(() -> new TableNotFoundException(tableName.asSchemaTableName())); Map<String, ColumnHandle> columns = metadata.getColumnHandles(session, table); TupleDomain<ColumnHandle> domains = TupleDomain.withColumnDomains( filter.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue))); Optional<ConstraintApplicationResult<TableHandle>> result = metadata.applyFilter(session, table, new Constraint(domains)); assertTrue(result.isEmpty() == (expectedUnenforcedPredicate == null && expectedEnforcedPredicate == null)); if (result.isPresent()) { IcebergTableHandle newTable = (IcebergTableHandle) result.get().getHandle().getConnectorHandle(); assertEquals( newTable.getEnforcedPredicate(), 
TupleDomain.withColumnDomains(expectedEnforcedPredicate.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue)))); assertEquals( newTable.getUnenforcedPredicate(), TupleDomain.withColumnDomains(expectedUnenforcedPredicate.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue)))); } }); } private TransactionBuilder newTransaction() { return transaction(getQueryRunner().getTransactionManager(), getQueryRunner().getAccessControl()); } private static class TestRelationalNumberPredicate implements Predicate<Map<ColumnHandle, NullableValue>> { private final String columnName; private final Number comparand; private final Predicate<Integer> comparePredicate; public TestRelationalNumberPredicate(String columnName, Number comparand, Predicate<Integer> comparePredicate) { this.columnName = columnName; this.comparand = comparand; this.comparePredicate = comparePredicate; } @Override public boolean test(Map<ColumnHandle, NullableValue> nullableValues) { for (Map.Entry<ColumnHandle, NullableValue> entry : nullableValues.entrySet()) { IcebergColumnHandle handle = (IcebergColumnHandle) entry.getKey(); if (columnName.equals(handle.getName())) { Object object = entry.getValue().getValue(); if (object instanceof Long) { return comparePredicate.test(((Long) object).compareTo(comparand.longValue())); } if (object instanceof Double) { return comparePredicate.test(((Double) object).compareTo(comparand.doubleValue())); } throw new IllegalArgumentException(format("NullableValue is neither Long or Double, but %s", object)); } } return false; } } private ColumnStatistics getStatisticsForColumn(TableStatistics tableStatistics, String columnName) { for (Map.Entry<ColumnHandle, ColumnStatistics> entry : tableStatistics.getColumnStatistics().entrySet()) { IcebergColumnHandle handle = (IcebergColumnHandle) entry.getKey(); if (handle.getName().equals(columnName)) { return 
checkColumnStatistics(entry.getValue()); } } throw new IllegalArgumentException("TableStatistics did not contain column named " + columnName); } private static IcebergColumnHandle getColumnHandleFromStatistics(TableStatistics tableStatistics, String columnName) { for (ColumnHandle columnHandle : tableStatistics.getColumnStatistics().keySet()) { IcebergColumnHandle handle = (IcebergColumnHandle) columnHandle; if (handle.getName().equals(columnName)) { return handle; } } throw new IllegalArgumentException("TableStatistics did not contain column named " + columnName); } private ColumnStatistics checkColumnStatistics(ColumnStatistics statistics) { assertNotNull(statistics, "statistics is null"); // Sadly, statistics.getDataSize().isUnknown() for columns in ORC files. See the TODO // in IcebergOrcFileWriter. if (format == ORC) { assertTrue(statistics.getDataSize().isUnknown()); } else { assertFalse(statistics.getDataSize().isUnknown()); } assertFalse(statistics.getNullsFraction().isUnknown(), "statistics nulls fraction is unknown"); assertFalse(statistics.getRange().isEmpty(), "statistics range is not present"); return statistics; } private TableStatistics getTableStatistics(String tableName, Constraint constraint) { Metadata metadata = getDistributedQueryRunner().getCoordinator().getMetadata(); QualifiedObjectName qualifiedName = QualifiedObjectName.valueOf(tableName); return transaction(getQueryRunner().getTransactionManager(), getQueryRunner().getAccessControl()) .execute(getSession(), session -> { Optional<TableHandle> optionalHandle = metadata.getTableHandle(session, qualifiedName); checkArgument(optionalHandle.isPresent(), "Could not create table handle for table %s", tableName); return metadata.getTableStatistics(session, optionalHandle.get(), constraint); }); } @Test public void testCreateNestedPartitionedTable() { assertUpdate("CREATE TABLE test_nested_table_1 (" + " bool BOOLEAN" + ", int INTEGER" + ", arr ARRAY(VARCHAR)" + ", big BIGINT" + ", rl REAL" + ", 
dbl DOUBLE" + ", mp MAP(INTEGER, VARCHAR)" + ", dec DECIMAL(5,2)" + ", vc VARCHAR" + ", vb VARBINARY" + ", ts TIMESTAMP(6)" + ", str ROW(id INTEGER , vc VARCHAR)" + ", dt DATE)" + " WITH (partitioning = ARRAY['int'])"); assertUpdate( "INSERT INTO test_nested_table_1 " + " select true, 1, array['uno', 'dos', 'tres'], BIGINT '1', REAL '1.0', DOUBLE '1.0', map(array[1,2,3,4], array['ek','don','teen','char'])," + " CAST(1.0 as DECIMAL(5,2))," + " 'one', VARBINARY 'binary0/1values',\n" + " TIMESTAMP '2021-07-24 02:43:57.348000'," + " (CAST(ROW(null, 'this is a random value') AS ROW(int, varchar))), " + " DATE '2021-07-24'", 1); assertEquals(computeActual("SELECT * from test_nested_table_1").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_1")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('bool', NULL, 0e0, NULL, NULL, NULL), " + " ('int', NULL, 0e0, NULL, '1', '1'), " + " ('arr', NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('big', NULL, 0e0, NULL, '1', '1'), " + " ('rl', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('dbl', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('mp', NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dec', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('vc', NULL, 0e0, NULL, NULL, NULL), " + " ('vb', NULL, 0e0, NULL, NULL, NULL), " + " ('ts', NULL, 0e0, NULL, " + (format == ORC ? "NULL, NULL" : "'2021-07-24 02:43:57.348000', '2021-07-24 02:43:57.348000'") + "), " + " ('str', NULL, " + (format == ORC ? 
"0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dt', NULL, 0e0, NULL, '2021-07-24', '2021-07-24'), " + " (NULL, NULL, NULL, 1e0, NULL, NULL)"); dropTable("test_nested_table_1"); assertUpdate("" + "CREATE TABLE test_nested_table_2 (" + " int INTEGER" + ", arr ARRAY(ROW(id INTEGER, vc VARCHAR))" + ", big BIGINT" + ", rl REAL" + ", dbl DOUBLE" + ", mp MAP(INTEGER, ARRAY(VARCHAR))" + ", dec DECIMAL(5,2)" + ", str ROW(id INTEGER, vc VARCHAR, arr ARRAY(INTEGER))" + ", vc VARCHAR)" + " WITH (partitioning = ARRAY['int'])"); assertUpdate( "INSERT INTO test_nested_table_2 " + " select 1, array[cast(row(1, null) as row(int, varchar)), cast(row(2, 'dos') as row(int, varchar))], BIGINT '1', REAL '1.0', DOUBLE '1.0', " + "map(array[1,2], array[array['ek', 'one'], array['don', 'do', 'two']]), CAST(1.0 as DECIMAL(5,2)), " + "CAST(ROW(1, 'this is a random value', null) AS ROW(int, varchar, array(int))), 'one'", 1); assertEquals(computeActual("SELECT * from test_nested_table_2").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_2")) .projected(0, 2, 3, 4, 5, 6) // ignore data size which is available for Parquet, but not for ORC .skippingTypesCheck() .matches("VALUES " + " ('int', NULL, 0e0, NULL, '1', '1'), " + " ('arr', NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('big', NULL, 0e0, NULL, '1', '1'), " + " ('rl', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('dbl', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('mp', NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dec', NULL, 0e0, NULL, '1.0', '1.0'), " + " ('vc', NULL, 0e0, NULL, NULL, NULL), " + " ('str', NULL, " + (format == ORC ? 
"0e0" : "NULL") + ", NULL, NULL, NULL), " + " (NULL, NULL, NULL, 1e0, NULL, NULL)"); assertUpdate("CREATE TABLE test_nested_table_3 WITH (partitioning = ARRAY['int']) AS SELECT * FROM test_nested_table_2", 1); assertEquals(computeActual("SELECT * FROM test_nested_table_3").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_3")) .matches("SHOW STATS FOR test_nested_table_2"); dropTable("test_nested_table_2"); dropTable("test_nested_table_3"); } @Test public void testSerializableReadIsolation() { assertUpdate("CREATE TABLE test_read_isolation (x int)"); assertUpdate("INSERT INTO test_read_isolation VALUES 123, 456", 2); withTransaction(session -> { assertQuery(session, "SELECT * FROM test_read_isolation", "VALUES 123, 456"); assertUpdate("INSERT INTO test_read_isolation VALUES 789", 1); assertQuery("SELECT * FROM test_read_isolation", "VALUES 123, 456, 789"); assertQuery(session, "SELECT * FROM test_read_isolation", "VALUES 123, 456"); }); assertQuery("SELECT * FROM test_read_isolation", "VALUES 123, 456, 789"); dropTable("test_read_isolation"); } private void withTransaction(Consumer<Session> consumer) { transaction(getQueryRunner().getTransactionManager(), getQueryRunner().getAccessControl()) .readCommitted() .execute(getSession(), consumer); } private void dropTable(String table) { Session session = getSession(); assertUpdate(session, "DROP TABLE " + table); assertFalse(getQueryRunner().tableExists(session, table)); } @Test public void testOptimizedMetadataQueries() { Session session = Session.builder(getSession()) .setSystemProperty("optimize_metadata_queries", "true") .build(); assertUpdate("CREATE TABLE test_metadata_optimization (a BIGINT, b BIGINT, c BIGINT) WITH (PARTITIONING = ARRAY['b', 'c'])"); assertUpdate("INSERT INTO test_metadata_optimization VALUES (5, 6, 7), (8, 9, 10)", 2); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization", "VALUES (6), (9)"); assertQuery(session, "SELECT DISTINCT b, c FROM 
test_metadata_optimization", "VALUES (6, 7), (9, 10)"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization WHERE b < 7", "VALUES (6)"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization WHERE c > 8", "VALUES (9)"); // Assert behavior after metadata delete assertUpdate("DELETE FROM test_metadata_optimization WHERE b = 6"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization", "VALUES (9)"); // TODO: assert behavior after deleting the last row of a partition, once row-level deletes are supported. // i.e. a query like 'DELETE FROM test_metadata_optimization WHERE b = 6 AND a = 5' dropTable("test_metadata_optimization"); } @Test public void testIncorrectIcebergFileSizes() throws Exception { // Create a table with a single insert assertUpdate("CREATE TABLE test_iceberg_file_size (x BIGINT) WITH (format='PARQUET')"); assertUpdate("INSERT INTO test_iceberg_file_size VALUES (123), (456), (758)", 3); // Get manifest file MaterializedResult result = computeActual("SELECT path FROM \"test_iceberg_file_size$manifests\""); assertEquals(result.getRowCount(), 1); String manifestFile = (String) result.getOnlyValue(); // Read manifest file Schema schema; GenericData.Record entry = null; try (DataFileReader<GenericData.Record> dataFileReader = new DataFileReader<>(new File(manifestFile), new GenericDatumReader<>())) { schema = dataFileReader.getSchema(); int recordCount = 0; while (dataFileReader.hasNext()) { entry = dataFileReader.next(); recordCount++; } assertEquals(recordCount, 1); } // Alter data file entry to store incorrect file size GenericData.Record dataFile = (GenericData.Record) entry.get("data_file"); long alteredValue = 50L; assertNotEquals((long) dataFile.get("file_size_in_bytes"), alteredValue); dataFile.put("file_size_in_bytes", alteredValue); // Replace the file through HDFS client. This is required for correct checksums. 
HdfsEnvironment.HdfsContext context = new HdfsContext(getSession().toConnectorSession()); Path manifestFilePath = new Path(manifestFile); FileSystem fs = HDFS_ENVIRONMENT.getFileSystem(context, manifestFilePath); // Write altered metadata try (OutputStream out = fs.create(manifestFilePath); DataFileWriter<GenericData.Record> dataFileWriter = new DataFileWriter<>(new GenericDatumWriter<>(schema))) { dataFileWriter.create(schema, out); dataFileWriter.append(entry); } // Ignoring Iceberg provided file size makes the query succeed Session session = Session.builder(getSession()) .setCatalogSessionProperty("iceberg", "use_file_size_from_metadata", "false") .build(); assertQuery(session, "SELECT * FROM test_iceberg_file_size", "VALUES (123), (456), (758)"); // Using Iceberg provided file size fails the query assertQueryFails("SELECT * FROM test_iceberg_file_size", format("Error reading tail from .* with length %d", alteredValue)); dropTable("test_iceberg_file_size"); } @Override protected TestTable createTableWithDefaultColumns() { throw new SkipException("Iceberg connector does not support column default values"); } @Override protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup) { String typeName = dataMappingTestSetup.getTrinoTypeName(); if (typeName.equals("tinyint") || typeName.equals("smallint") || typeName.startsWith("char(")) { // These types are not supported by Iceberg return Optional.of(dataMappingTestSetup.asUnsupported()); } // According to Iceberg specification all time and timestamp values are stored with microsecond precision. 
if (typeName.equals("time")) { return Optional.of(new DataMappingTestSetup("time(6)", "TIME '15:03:00'", "TIME '23:59:59.999999'")); } if (typeName.equals("timestamp")) { return Optional.of(new DataMappingTestSetup("timestamp(6)", "TIMESTAMP '2020-02-12 15:03:00'", "TIMESTAMP '2199-12-31 23:59:59.999999'")); } if (typeName.equals("timestamp(3) with time zone")) { return Optional.of(new DataMappingTestSetup("timestamp(6) with time zone", "TIMESTAMP '2020-02-12 15:03:00 +01:00'", "TIMESTAMP '9999-12-31 23:59:59.999999 +12:00'")); } return Optional.of(dataMappingTestSetup); } @Override protected Optional<DataMappingTestSetup> filterCaseSensitiveDataMappingTestData(DataMappingTestSetup dataMappingTestSetup) { String typeName = dataMappingTestSetup.getTrinoTypeName(); if (typeName.equals("char(1)")) { return Optional.of(dataMappingTestSetup.asUnsupported()); } return Optional.of(dataMappingTestSetup); } }
Remove redundant table name qualification
plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
Remove redundant table name qualification
<ide><path>lugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java <ide> @Test <ide> public void testLargeInFailureOnPartitionedColumns() <ide> { <del> QualifiedObjectName tableName = new QualifiedObjectName("iceberg", "tpch", "test_large_in_failure"); <del> assertUpdate(format( <del> "CREATE TABLE %s (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col2'])", <del> tableName)); <del> assertUpdate(format("INSERT INTO %s VALUES (1, 10)", tableName), 1L); <del> assertUpdate(format("INSERT INTO %s VALUES (2, 20)", tableName), 1L); <add> assertUpdate("CREATE TABLE test_large_in_failure (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col2'])"); <add> assertUpdate("INSERT INTO test_large_in_failure VALUES (1, 10)", 1L); <add> assertUpdate("INSERT INTO test_large_in_failure VALUES (2, 20)", 1L); <ide> <ide> List<String> predicates = IntStream.range(0, 5000).boxed() <ide> .map(Object::toString) <ide> .collect(toImmutableList()); <ide> <ide> String filter = format("col2 IN (%s)", String.join(",", predicates)); <del> <del> assertThatThrownBy(() -> getQueryRunner().execute(format("SELECT * FROM %s WHERE %s", tableName, filter))) <add> assertThatThrownBy(() -> getQueryRunner().execute(format("SELECT * FROM test_large_in_failure WHERE %s", filter))) <ide> .isInstanceOf(RuntimeException.class) <ide> .hasMessage("java.lang.StackOverflowError"); <ide>
Java
apache-2.0
01215c762663842235c7b89303ecca4afe4720a7
0
coplas/SqliteDDLHelper
package sk.coplas.sqliteddlhelper; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; /** * Created by coplas on 10/2/14. */ public class DDLBuilder { private String tableName; private LinkedHashMap<String, ColumnType> columns; private String primaryKey; private String autoincrement; private String columnNameCache; private List<String> notNullcolumns; private boolean isAlter; private DDLBuilder(){ columns = new LinkedHashMap<String, ColumnType>(); notNullcolumns = new ArrayList<String>(); } public static DDLBuilder createTable(String tableName) { DDLBuilder builder = new DDLBuilder(); builder.tableName = tableName; builder.isAlter = false; return builder; } public static DDLBuilder alterTable(String tableName) { DDLBuilder builder = new DDLBuilder(); builder.tableName = tableName; builder.isAlter = true; return builder; } public String build(){ StringBuilder sb = new StringBuilder(); if (tableName == null || tableName.length() < 0) { throw new IllegalArgumentException("Table name is required"); } if (isAlter) { if (columns.size() != 1) { throw new IllegalArgumentException("One column is required in SQLite ALTER"); } else { //get first column String columnName = columns.keySet().iterator().next(); ColumnType columnType = columns.get( columnName); sb.append("ALTER TABLE "); sb.append(tableName); sb.append(" ADD COLUMN "); sb.append(columnName); sb.append(" "); sb.append(columnType.toString()); createColumnParams(sb, columnName); } } else { if (columns.size() < 1) { throw new IllegalArgumentException("At least one column is required in SQLite CREATE"); } else { sb.append("CREATE TABLE "); sb.append(tableName); sb.append(" ( "); final Iterator<String> columnsIterator = columns.keySet().iterator(); while (columnsIterator.hasNext()) { String columnName = columnsIterator.next(); ColumnType columnType = columns.get( columnName); sb.append(columnName); sb.append(" "); sb.append(columnType.toString()); 
createColumnParams(sb, columnName); if (columnsIterator.hasNext()) { sb.append(", "); } } sb.append(")"); } } return sb.toString(); } private void createColumnParams(StringBuilder sb, String columnName) { if (columnName.equals( primaryKey)) { sb.append(" PRIMARY KEY"); } if (columnName.equals( autoincrement)) { sb.append(" AUTOINCREMENT"); } if (notNullcolumns.contains( columnName)) { sb.append(" NOT NULL"); } } public static enum ColumnType { TEXT,NUMERIC,INTEGER,REAL,BOOLEAN, NONE } private class SingleColumnBuilder{ private String name; private ColumnType type; private boolean pk; private boolean autoIncrement; private boolean notNull; public DDLBuilder pk(String columnName) { this.primaryKey = columnName; return this; } public DDLBuilder autoIncrement(String columnName) { this.primaryKey = columnName; return this; } public DDLBuilder pk() { this.primaryKey = columnNameCache; return this; } public DDLBuilder autoIncrement() { this.autoincrement = columnNameCache; return this; } public DDLBuilder notNull() { this.notNullcolumns.add( columnNameCache); return this; } public DDLBuilder text(String columnName) { return column(columnName, ColumnType.TEXT); } public DDLBuilder numeric(String columnName) { return column(columnName, ColumnType.NUMERIC); } public DDLBuilder integer(String columnName) { return column(columnName, ColumnType.INTEGER); } public DDLBuilder real(String columnName) { return column(columnName, ColumnType.REAL); } public DDLBuilder bool(String columnName) { return column(columnName, ColumnType.BOOLEAN); } public DDLBuilder none(String columnName) { return column(columnName, ColumnType.NONE); } public DDLBuilder column(String columnName, ColumnType columnType) { if (isAlter && columns.size() > 0) { throw new IllegalArgumentException("Only one column is allowed in SQLite ALTER"); //http://stackoverflow.com/questions/6172815/sqlite-alter-createTableBuilder-add-multiple-columns-in-a-single-statement } if (!columns.containsKey(columnName)) { 
columns.put(columnName, columnType); } columnNameCache = columnName; return this; } } private class MultiColumnBuilder extends SingleColumnBuilder{ private List<SingleColumnBuilder> columns; private SingleColumnBuilder column; private MultiColumnBuilder() { columns = new ArrayList<SingleColumnBuilder>(); } public MultiColumnBuilder column() { if (column != null) { columns.add( column); } column = new SingleColumnBuilder(); return this; } } }
library/src/main/java/sk/coplas/sqliteddlhelper/DDLBuilder.java
package sk.coplas.sqliteddlhelper; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; /** * Created by coplas on 10/2/14. */ public class DDLBuilder { public static enum ColumnType { TEXT,NUMERIC,INTEGER,REAL,BOOLEAN, NONE } private String tableName; private LinkedHashMap<String, ColumnType> columns; private String primaryKey; private String autoincrement; private String columnNameCache; private List<String> notNullcolumns; private boolean isAlter; private DDLBuilder(){ columns = new LinkedHashMap<String, ColumnType>(); notNullcolumns = new ArrayList<String>(); } public static DDLBuilder createTable(String tableName) { DDLBuilder builder = new DDLBuilder(); builder.tableName = tableName; builder.isAlter = false; return builder; } public static DDLBuilder alterTable(String tableName) { DDLBuilder builder = new DDLBuilder(); builder.tableName = tableName; builder.isAlter = true; return builder; } public DDLBuilder pk(String columnName) { this.primaryKey = columnName; return this; } public DDLBuilder autoIncrement(String columnName) { this.primaryKey = columnName; return this; } public DDLBuilder pk() { this.primaryKey = columnNameCache; return this; } public DDLBuilder autoIncrement() { this.autoincrement = columnNameCache; return this; } public DDLBuilder notNull() { this.notNullcolumns.add( columnNameCache); return this; } public DDLBuilder text(String columnName) { return column(columnName, ColumnType.TEXT); } public DDLBuilder numeric(String columnName) { return column(columnName, ColumnType.NUMERIC); } public DDLBuilder integer(String columnName) { return column(columnName, ColumnType.INTEGER); } public DDLBuilder real(String columnName) { return column(columnName, ColumnType.REAL); } public DDLBuilder bool(String columnName) { return column(columnName, ColumnType.BOOLEAN); } public DDLBuilder none(String columnName) { return column(columnName, ColumnType.NONE); } public DDLBuilder column(String 
columnName, ColumnType columnType) { if (isAlter && columns.size() > 0) { throw new IllegalArgumentException("Only one column is allowed in SQLite ALTER"); //http://stackoverflow.com/questions/6172815/sqlite-alter-createTableBuilder-add-multiple-columns-in-a-single-statement } if (!columns.containsKey(columnName)) { columns.put(columnName, columnType); } columnNameCache = columnName; return this; } public String build(){ StringBuilder sb = new StringBuilder(); if (tableName == null || tableName.length() < 0) { throw new IllegalArgumentException("Table name is required"); } if (isAlter) { if (columns.size() != 1) { throw new IllegalArgumentException("One column is required in SQLite ALTER"); } else { //get first column String columnName = columns.keySet().iterator().next(); ColumnType columnType = columns.get( columnName); sb.append("ALTER TABLE "); sb.append(tableName); sb.append(" ADD COLUMN "); sb.append(columnName); sb.append(" "); sb.append(columnType.toString()); createColumnParams(sb, columnName); } } else { if (columns.size() < 1) { throw new IllegalArgumentException("At least one column is required in SQLite CREATE"); } else { sb.append("CREATE TABLE "); sb.append(tableName); sb.append(" ( "); final Iterator<String> columnsIterator = columns.keySet().iterator(); while (columnsIterator.hasNext()) { String columnName = columnsIterator.next(); ColumnType columnType = columns.get( columnName); sb.append(columnName); sb.append(" "); sb.append(columnType.toString()); createColumnParams(sb, columnName); if (columnsIterator.hasNext()) { sb.append(", "); } } sb.append(")"); } } return sb.toString(); } private void createColumnParams(StringBuilder sb, String columnName) { if (columnName.equals( primaryKey)) { sb.append(" PRIMARY KEY"); } if (columnName.equals( autoincrement)) { sb.append(" AUTOINCREMENT"); } if (notNullcolumns.contains( columnName)) { sb.append(" NOT NULL"); } } }
work in progress
library/src/main/java/sk/coplas/sqliteddlhelper/DDLBuilder.java
work in progress
<ide><path>ibrary/src/main/java/sk/coplas/sqliteddlhelper/DDLBuilder.java <ide> */ <ide> public class DDLBuilder { <ide> <del> public static enum ColumnType { <del> TEXT,NUMERIC,INTEGER,REAL,BOOLEAN, NONE <del> } <ide> <ide> private String tableName; <ide> private LinkedHashMap<String, ColumnType> columns; <ide> return builder; <ide> } <ide> <del> public DDLBuilder pk(String columnName) { <del> this.primaryKey = columnName; <del> return this; <del> } <del> <del> public DDLBuilder autoIncrement(String columnName) { <del> this.primaryKey = columnName; <del> return this; <del> } <del> <del> public DDLBuilder pk() { <del> this.primaryKey = columnNameCache; <del> return this; <del> } <del> <del> public DDLBuilder autoIncrement() { <del> this.autoincrement = columnNameCache; <del> return this; <del> } <del> <del> public DDLBuilder notNull() { <del> this.notNullcolumns.add( columnNameCache); <del> return this; <del> } <del> <del> <del> <del> public DDLBuilder text(String columnName) { <del> return column(columnName, ColumnType.TEXT); <del> } <del> <del> public DDLBuilder numeric(String columnName) { <del> return column(columnName, ColumnType.NUMERIC); <del> } <del> <del> public DDLBuilder integer(String columnName) { <del> return column(columnName, ColumnType.INTEGER); <del> } <del> <del> public DDLBuilder real(String columnName) { <del> return column(columnName, ColumnType.REAL); <del> } <del> <del> public DDLBuilder bool(String columnName) { <del> return column(columnName, ColumnType.BOOLEAN); <del> } <del> <del> public DDLBuilder none(String columnName) { <del> return column(columnName, ColumnType.NONE); <del> } <del> <del> <del> public DDLBuilder column(String columnName, ColumnType columnType) { <del> if (isAlter && columns.size() > 0) { <del> throw new IllegalArgumentException("Only one column is allowed in SQLite ALTER"); <del> //http://stackoverflow.com/questions/6172815/sqlite-alter-createTableBuilder-add-multiple-columns-in-a-single-statement <del> } <del> <del> 
if (!columns.containsKey(columnName)) { <del> columns.put(columnName, columnType); <del> } <del> columnNameCache = columnName; <del> return this; <del> } <ide> <ide> public String build(){ <ide> StringBuilder sb = new StringBuilder(); <ide> } <ide> } <ide> <add> public static enum ColumnType { <add> TEXT,NUMERIC,INTEGER,REAL,BOOLEAN, NONE <add> } <add> <add> private class SingleColumnBuilder{ <add> <add> <add> private String name; <add> private ColumnType type; <add> private boolean pk; <add> private boolean autoIncrement; <add> private boolean notNull; <add> <add> public DDLBuilder pk(String columnName) { <add> this.primaryKey = columnName; <add> return this; <add> } <add> <add> public DDLBuilder autoIncrement(String columnName) { <add> this.primaryKey = columnName; <add> return this; <add> } <add> <add> public DDLBuilder pk() { <add> this.primaryKey = columnNameCache; <add> return this; <add> } <add> <add> public DDLBuilder autoIncrement() { <add> this.autoincrement = columnNameCache; <add> return this; <add> } <add> <add> public DDLBuilder notNull() { <add> this.notNullcolumns.add( columnNameCache); <add> return this; <add> } <add> <add> <add> <add> public DDLBuilder text(String columnName) { <add> return column(columnName, ColumnType.TEXT); <add> } <add> <add> public DDLBuilder numeric(String columnName) { <add> return column(columnName, ColumnType.NUMERIC); <add> } <add> <add> public DDLBuilder integer(String columnName) { <add> return column(columnName, ColumnType.INTEGER); <add> } <add> <add> public DDLBuilder real(String columnName) { <add> return column(columnName, ColumnType.REAL); <add> } <add> <add> public DDLBuilder bool(String columnName) { <add> return column(columnName, ColumnType.BOOLEAN); <add> } <add> <add> public DDLBuilder none(String columnName) { <add> return column(columnName, ColumnType.NONE); <add> } <add> <add> <add> public DDLBuilder column(String columnName, ColumnType columnType) { <add> if (isAlter && columns.size() > 0) { <add> throw 
new IllegalArgumentException("Only one column is allowed in SQLite ALTER"); <add> //http://stackoverflow.com/questions/6172815/sqlite-alter-createTableBuilder-add-multiple-columns-in-a-single-statement <add> } <add> <add> if (!columns.containsKey(columnName)) { <add> columns.put(columnName, columnType); <add> } <add> columnNameCache = columnName; <add> return this; <add> } <add> <add> } <add> <add> private class MultiColumnBuilder extends SingleColumnBuilder{ <add> private List<SingleColumnBuilder> columns; <add> <add> private SingleColumnBuilder column; <add> <add> private MultiColumnBuilder() { <add> columns = new ArrayList<SingleColumnBuilder>(); <add> } <add> <add> public MultiColumnBuilder column() { <add> if (column != null) { <add> columns.add( column); <add> } <add> column = new SingleColumnBuilder(); <add> return this; <add> } <add> <add> } <add> <add> <ide> }
JavaScript
mit
0bd13e09210eb0917cf31d4312c65a4f9f341fcf
0
ptejada/ApePubSub,ptejada/ApePubSub
function APS( server, events, options ){ this.option = { 'poll': 25000, debug: false, session: true, connectionArgs: {}, server: server, transport: ["wb", "lp"], //transport: "lp", //Should be the default transport option for APE Server v1.1.1 secure: false, eventPush: false, addFrequency: true } this.identifier = "APS"; this.version = '1.5.2'; this.state = 0; this._events = {}; this.chl = 0; this.user = {}; this.pipes = {}; this.channels = {}; this.eQueue = {}; //Add Events if(!!events) this.on(events); //Update options if(!!options){ for(var opt in options){ this.option[opt] = options[opt]; } } //IE9 crap - log function fix if(navigator.appName == "Microsoft Internet Explorer"){ if(typeof window.console == "undefined"){ this.log = function(){}; }else{ this.log = function(){ if(this.option.debug == false) return; var args = Array.prototype.slice.call(arguments); args.unshift("["+this.identifier+"]"); window.console.log(args.join().replace(",","")); } } } this.session._client = this; return this; } /* * Handles the initial connection to the server */ APS.prototype.connect = function(args){ var fserver = this.option.server; function TransportError(e){ this.trigger("dead", [e]); this.transport.close(); } var cb = { 'onmessage': this.onMessage.bind(this), 'onerror': TransportError.bind(this) } if(this.state == 1) return this.log("Already Connected!"); var cmd = "CONNECT"; args = this.option.connectionArgs = args || this.option.connectionArgs; var restore = this.session.restore(); //increase frequency this.session.freq.change(parseInt(this.session.freq.value) + 1); //Handle sessions if(this.option.session == true){ if(typeof restore == "object"){ args = restore; //Change initial command CONNECT by RESTORE cmd = "RESTORE"; }else{ //Fresh Connect if(this.trigger("connect") == false) return false; } //Apply frequency to the server if(this.option.addFrequency) fserver = this.session.freq.value + "." 
+ fserver; }else{ //Fresh Connect this.state = 0; //this.session.id = ""; if(this.trigger("connect") == false) return false; } //Handle transport if(!!this.transport){ if(this.transport.state == 0){ this.transport = new APS.transport(fserver, cb, this); }else{ //Use current active transport } }else{ this.transport = new APS.transport(fserver, cb, this); } //Send seleced command and arguments this.sendCmd(cmd, args); return this; } /* * Attempts to reconnect to the server */ APS.prototype.reconnect = function(){ if(this.state > 0 && this.transport.state > 0) return this.log("Client already connected!"); //Clear channels stack this.channels = {}; this.connect(); } /* * Fires events on object's _events stack */ APS.prototype.trigger = function(ev, args){ ev = ev.toLowerCase(); if(!(args instanceof Array)) args = [args]; //GLobal if("_client" in this){ for(var i in this._client._events[ev]){ if(this._client._events[ev].hasOwnProperty(i)){ this.log("{{{ " + ev + " }}}["+i+"] on client ", this._client); if(this._client._events[ev][i].apply(this, args) === false) return false; } } } //Local for(var i in this._events[ev]){ if(this._events[ev].hasOwnProperty(i)){ if(!this._client){ this.log("{{{ " + ev + " }}}["+i+"] on client ", this); }else{ this.log("{{{ " + ev + " }}}["+i+"] ", this); } if(this._events[ev][i].apply(this, args) === false) return false; } } return true; } /* * Use to handles events on all object */ APS.prototype.on = function(ev, fn){ var Events = []; if(typeof ev == 'string' && typeof fn == 'function'){ Events[ev] = fn; }else if(typeof ev == "object"){ Events = ev; }else{ return this; } for(var e in Events){ if(!Events.hasOwnProperty(e)) continue; var fn = Events[e]; e = e.toLowerCase(); if(!this._events[e]) this._events[e] = []; this._events[e].push(fn); } return this; } /* * Get any object by its unique pubid */ APS.prototype.getPipe = function(user){ if(typeof user == 'string'){ return this.pipes[user]; } else { return this.pipes[user.pubid]; } } /* * 
Sends an event throught a pipe/user/channel */ APS.prototype.send = function(pipe, $event, data, sync, callback){ this.sendCmd("Event", { event: $event, data: data, sync: sync }, pipe, callback); } /* * Internal method to wrap events and send them as commands to the server */ APS.prototype.sendCmd = function(cmd, args, pipe, callback){ var specialCmd = {CONNECT: 0, RESTORE:0}; if(this.state == 1 || cmd in specialCmd){ var tmp = { 'cmd': cmd, 'chl': this.chl, 'freq': this.session.freq.value } if(args) tmp.params = args; if(pipe) { tmp.params.pipe = typeof pipe == 'string' ? pipe : pipe.pubid; if(this.getPipe(tmp.params.pipe) instanceof APS.channel){ tmp.params.multi = true; }else{ tmp.params.multi = false; } } if(this.session.id) tmp.sessid = this.session.id; this.log('<<<< ', cmd.toUpperCase() , " >>>> ", tmp); if(typeof callback != "function") callback = function(){}; var data = []; try { data = JSON.stringify([tmp]); }catch(e){ this.log(e); this.log(data); } //Send command if(this.transport.send(data, callback, tmp) != "pushed"){ this.session.saveChl(); } } else { this.on('ready', this.sendCmd.bind(this, cmd, args)); } return this; } /* * Polls the server for information when using the Long Polling transport */ APS.prototype.poll = function(){ if(this.transport.id == 0){ clearTimeout(this.poller); this.poller = setTimeout(this.check.bind(this), this.option.poll); } } /* * Sends a check command to the server */ APS.prototype.check = function(force){ if(this.transport.id == 0 || !!force){ this.sendCmd('CHECK'); this.poll(); } } /* * Sends the QUIT command to the server and completely destroys the client instance */ APS.prototype.quit = function(){ this.sendCmd('QUIT'); this.transport.close(); this.trigger("dead"); //Clear session on 'quit' this.session.destroy(); this.state = 0; } /* * Subscribe to a channel */ APS.prototype.sub = function(channel, Events, callback){ //Handle the events if(typeof Events == "object"){ if(typeof channel == "object"){ for(var chan in 
channel){ this.onChannel(channel[chan], Events); } }else{ this.onChannel(channel, Events); } } //Handle callback if(typeof callback == "function"){ if(typeof channel == "object"){ for(var chan in channel){ this.onChannel(channel[chan], "joined", callback); } }else{ this.onChannel(channel, "joined", callback); } } //Join Channel if(this.state == 0){ this.on("ready", this.sub.bind(this, channel)); this.connect({user: this.user}); }else{ //Logic to only send the JOIN request to only non-existing channels in the client object if(typeof channel == "string"){ //Single Channel channel = channel.toLowerCase(); if(typeof this.channels[channel] != "object"){ this.sendCmd('JOIN', {'channels': channel}); } }else{ //Multi Channel var toJoin = []; for(var x in channel){ if(typeof this.channels[channel[x].toLowerCase()] != "object") toJoin.push(channel[x]); } if(toJoin.length > 0) this.sendCmd('JOIN', {'channels': toJoin}); } } return this; } /* * Publish data/message in a channel or to a user */ APS.prototype.pub = function(channel, data, sync, callback){ var pipe = this.getChannel(channel); if(!pipe && channel.length == 32) pipe = this.getPipe(channel); if(pipe){ var $event = typeof data == "string" ? 
"message" : "data"; if($event == "message") data = encodeURIComponent(data); pipe.send($event, data, sync, callback); }else{ this.log("NO Channel " + channel); } }; /* * Get a channel object by its name */ APS.prototype.getChannel = function(channel){ channel = channel.toLowerCase(); if(channel in this.channels){ return this.channels[channel]; } return false; } /* * Add events to a channel, even is user has not subscribed to it yet */ APS.prototype.onChannel = function(channel, Events, fn){ channel = channel.toLowerCase(); if(channel in this.channels){ this.channels[channel].on(Events, fn); return true; } if(typeof Events == "object"){ //add events to queue if(typeof this.eQueue[channel] != "object") this.eQueue[channel] = []; //this.eQueue[channel].push(Events); for(var $event in Events){ var fn = Events[$event]; this.eQueue[channel].push([$event, fn]); this.log("Adding ["+channel+"] event '"+$event+"' to queue"); } }else{ var xnew = Object(); xnew[Events] = fn; this.onChannel(channel,xnew); } } /* * Unsubscribe from a channel */ APS.prototype.unSub = function(channel){ if(channel == "") return; this.getChannel(channel).leave(); //Delete the Event Queue in case the channel is created again delete this.eQueue[channel]; } /* * Debug Function for Browsers console */ if(navigator.appName != "Microsoft Internet Explorer"){ APS.prototype.log = function(){ if(!this.option.debug) return; var args = Array.prototype.slice.call(arguments); args.unshift("["+this.identifier+"]"); window.console.log.apply(console, args); }; }
js/src/client.js
function APS( server, events, options ){ this.option = { 'poll': 25000, debug: false, session: true, connectionArgs: {}, server: server, transport: ["wb", "lp"], //transport: "lp", //Should be the default transport option for APE Server v1.1.1 secure: false, eventPush: false, addFrequency: true } this.identifier = "APS"; this.version = '1.5.2'; this.state = 0; this._events = {}; this.chl = 0; this.user = {}; this.pipes = {}; this.channels = {}; this.eQueue = {}; //Add Events if(!!events) this.on(events); //Update options if(!!options){ for(var opt in options){ this.option[opt] = options[opt]; } } //IE9 crap - log function fix if(navigator.appName == "Microsoft Internet Explorer"){ if(typeof window.console == "undefined"){ this.log = function(){}; }else{ this.log = function(){ if(this.option.debug == false) return; var args = Array.prototype.slice.call(arguments); args.unshift("["+this.identifier+"]"); window.console.log(args.join().replace(",","")); } } } this.session._client = this; return this; } /* * Handles the initial connection to the server */ APS.prototype.connect = function(args){ var fserver = this.option.server; function TransportError(e){ this.trigger("dead", [e]); this.transport.close(); } var cb = { 'onmessage': this.onMessage.bind(this), 'onerror': TransportError.bind(this) } if(this.state == 1) return this.log("Already Connected!"); var cmd = "CONNECT"; args = this.option.connectionArgs = args || this.option.connectionArgs; var restore = this.session.restore(); //increase frequency this.session.freq.change(parseInt(this.session.freq.value) + 1); //Handle sessions if(this.option.session == true){ if(typeof restore == "object"){ args = restore; //Change initial command CONNECT by RESTORE cmd = "RESTORE"; }else{ //Fresh Connect if(this.trigger("connect") == false) return false; } //Apply frequency to the server if(this.option.addFrequency) fserver = this.session.freq.value + "." 
+ fserver; }else{ //Fresh Connect this.state = 0; //this.session.id = ""; if(this.trigger("connect") == false) return false; } //Handle transport if(!!this.transport){ if(this.transport.state == 0){ this.transport = new APS.transport(fserver, cb, this); }else{ //Use current active transport } }else{ this.transport = new APS.transport(fserver, cb, this); } //Send seleced command and arguments this.sendCmd(cmd, args); return this; } /* * Attempts to reconnect to the server */ APS.prototype.reconnect = function(){ if(this.state > 0 && this.transport.state > 0) return this.log("Client already connected!"); //Clear channels stack this.channels = {}; this.connect(); } /* * Fires events on object's _events stack */ APS.prototype.trigger = function(ev, args){ ev = ev.toLowerCase(); if(!(args instanceof Array)) args = [args]; //GLobal if("_client" in this){ for(var i in this._client._events[ev]){ if(this._client._events[ev].hasOwnProperty(i)){ this.log("{{{ " + ev + " }}}["+i+"] on client ", this._client); if(this._client._events[ev][i].apply(this, args) === false) return false; } } } //Local for(var i in this._events[ev]){ if(this._events[ev].hasOwnProperty(i)){ if(!this._client){ this.log("{{{ " + ev + " }}}["+i+"] on client ", this); }else{ this.log("{{{ " + ev + " }}}["+i+"] ", this); } if(this._events[ev][i].apply(this, args) === false) return false; } } return true; } /* * Use to handles events on all object */ APS.prototype.on = function(ev, fn){ var Events = []; if(typeof ev == 'string' && typeof fn == 'function'){ Events[ev] = fn; }else if(typeof ev == "object"){ Events = ev; }else{ return this; } for(var e in Events){ if(!Events.hasOwnProperty(e)) continue; var fn = Events[e]; e = e.toLowerCase(); if(!this._events[e]) this._events[e] = []; this._events[e].push(fn); } return this; } /* * Get any object by its unique pubid */ APS.prototype.getPipe = function(user){ if(typeof user == 'string'){ return this.pipes[user]; } else { return this.pipes[user.pubid]; } } /* * 
Sends an event throught a pipe/user/channel */ APS.prototype.send = function(pipe, $event, data, sync, callback){ this.sendCmd("Event", { event: $event, data: data, sync: sync }, pipe, callback); } /* * Internal method to wrap events and send them as commands to the server */ APS.prototype.sendCmd = function(cmd, args, pipe, callback){ var specialCmd = {CONNECT: 0, RESTORE:0}; if(this.state == 1 || cmd in specialCmd){ var tmp = { 'cmd': cmd, 'chl': this.chl, 'freq': this.session.freq.value } if(args) tmp.params = args; if(pipe) { tmp.params.pipe = typeof pipe == 'string' ? pipe : pipe.pubid; if(this.getPipe(tmp.params.pipe) instanceof APS.channel){ //tmp.params.multi = true; }else{ //tmp.params.multi = false; } } if(this.session.id) tmp.sessid = this.session.id; this.log('<<<< ', cmd.toUpperCase() , " >>>> ", tmp); if(typeof callback != "function") callback = function(){}; var data = []; try { data = JSON.stringify([tmp]); }catch(e){ this.log(e); this.log(data); } //Send command if(this.transport.send(data, callback, tmp) != "pushed"){ this.session.saveChl(); } } else { this.on('ready', this.sendCmd.bind(this, cmd, args)); } return this; } /* * Polls the server for information when using the Long Polling transport */ APS.prototype.poll = function(){ if(this.transport.id == 0){ clearTimeout(this.poller); this.poller = setTimeout(this.check.bind(this), this.option.poll); } } /* * Sends a check command to the server */ APS.prototype.check = function(force){ if(this.transport.id == 0 || !!force){ this.sendCmd('CHECK'); this.poll(); } } /* * Sends the QUIT command to the server and completely destroys the client instance */ APS.prototype.quit = function(){ this.sendCmd('QUIT'); this.transport.close(); this.trigger("dead"); //Clear session on 'quit' this.session.destroy(); this.state = 0; } /* * Subscribe to a channel */ APS.prototype.sub = function(channel, Events, callback){ //Handle the events if(typeof Events == "object"){ if(typeof channel == "object"){ for(var chan 
in channel){ this.onChannel(channel[chan], Events); } }else{ this.onChannel(channel, Events); } } //Handle callback if(typeof callback == "function"){ if(typeof channel == "object"){ for(var chan in channel){ this.onChannel(channel[chan], "joined", callback); } }else{ this.onChannel(channel, "joined", callback); } } //Join Channel if(this.state == 0){ this.on("ready", this.sub.bind(this, channel)); this.connect({user: this.user}); }else{ //Logic to only send the JOIN request to only non-existing channels in the client object if(typeof channel == "string"){ //Single Channel channel = channel.toLowerCase(); if(typeof this.channels[channel] != "object"){ this.sendCmd('JOIN', {'channels': channel}); } }else{ //Multi Channel var toJoin = []; for(var x in channel){ if(typeof this.channels[channel[x].toLowerCase()] != "object") toJoin.push(channel[x]); } if(toJoin.length > 0) this.sendCmd('JOIN', {'channels': toJoin}); } } return this; } /* * Publish data/message in a channel or to a user */ APS.prototype.pub = function(channel, data, sync, callback){ var pipe = this.getChannel(channel); if(!pipe && channel.length == 32) pipe = this.getPipe(channel); if(pipe){ var $event = typeof data == "string" ? 
"message" : "data"; if($event == "message") data = encodeURIComponent(data); pipe.send($event, data, sync, callback); }else{ this.log("NO Channel " + channel); } }; /* * Get a channel object by its name */ APS.prototype.getChannel = function(channel){ channel = channel.toLowerCase(); if(channel in this.channels){ return this.channels[channel]; } return false; } /* * Add events to a channel, even is user has not subscribed to it yet */ APS.prototype.onChannel = function(channel, Events, fn){ channel = channel.toLowerCase(); if(channel in this.channels){ this.channels[channel].on(Events, fn); return true; } if(typeof Events == "object"){ //add events to queue if(typeof this.eQueue[channel] != "object") this.eQueue[channel] = []; //this.eQueue[channel].push(Events); for(var $event in Events){ var fn = Events[$event]; this.eQueue[channel].push([$event, fn]); this.log("Adding ["+channel+"] event '"+$event+"' to queue"); } }else{ var xnew = Object(); xnew[Events] = fn; this.onChannel(channel,xnew); } } /* * Unsubscribe from a channel */ APS.prototype.unSub = function(channel){ if(channel == "") return; this.getChannel(channel).leave(); //Delete the Event Queue in case the channel is created again delete this.eQueue[channel]; } /* * Debug Function for Browsers console */ if(navigator.appName != "Microsoft Internet Explorer"){ APS.prototype.log = function(){ if(!this.option.debug) return; var args = Array.prototype.slice.call(arguments); args.unshift("["+this.identifier+"]"); window.console.log.apply(console, args); }; }
send an extra parameter `multi` to deremine weather th recpient is a channel or user
js/src/client.js
send an extra parameter `multi` to deremine weather th recpient is a channel or user
<ide><path>s/src/client.js <ide> if(pipe) { <ide> tmp.params.pipe = typeof pipe == 'string' ? pipe : pipe.pubid; <ide> if(this.getPipe(tmp.params.pipe) instanceof APS.channel){ <del> //tmp.params.multi = true; <add> tmp.params.multi = true; <ide> }else{ <del> //tmp.params.multi = false; <add> tmp.params.multi = false; <ide> } <ide> } <ide> if(this.session.id) tmp.sessid = this.session.id;
Java
lgpl-2.1
e0b4e2fb5b98557119a9584d31b341db16771860
0
levants/lightmare
package org.lightmare.ejb.interceptors; import java.lang.reflect.Method; import java.util.HashMap; import java.util.LinkedList; import java.util.Map; import java.util.Queue; import javax.ejb.Timer; import javax.interceptor.InvocationContext; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.reflect.MetaUtils; /** * Implementation of {@link InvocationContext} for EJB intercepter * * @author Levan * */ public class InvocationContextImpl implements InvocationContext { // Caches methods in order for proceed calls private Queue<Method> methods = new LinkedList<Method>(); // Parameters for intercepted method private Object[] parameters; // Caches object in order to proceed method calls in chain private Queue<Object> targets = new LinkedList<Object>(); private Map<String, Object> contextData = new HashMap<String, Object>(); private Timer timer; public InvocationContextImpl(Queue<Method> methods, Queue<Object> targets, Object[] parameters) { this.methods = methods; this.targets = targets; this.parameters = parameters; } public InvocationContextImpl(Queue<Method> methods, Queue<Object> targets, Object[] parameters, Timer timer) { this(methods, targets, parameters); this.timer = timer; } @Override public Object getTarget() { Object target = targets.peek(); return target; } @Override public Method getMethod() { return methods.peek(); } @Override public Object[] getParameters() { return parameters; } @Override public void setParameters(Object[] parameters) { this.parameters = parameters; } @Override public Map<String, Object> getContextData() { return contextData; } @Override public Object getTimer() { // TODO find out usage of this method and write implementation return timer; } @Override public Object proceed() throws Exception { Method method = methods.poll(); Object target = targets.poll(); Object value; if (ObjectUtils.notNull(method) && ObjectUtils.notNull(target)) { value = MetaUtils.invokePrivate(method, target, this); } else { value = null; } return 
value; } }
src/main/java/org/lightmare/ejb/interceptors/InvocationContextImpl.java
package org.lightmare.ejb.interceptors; import java.lang.reflect.Method; import java.util.HashMap; import java.util.LinkedList; import java.util.Map; import java.util.Queue; import javax.ejb.Timer; import javax.interceptor.InvocationContext; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.reflect.MetaUtils; /** * Implementation of {@link InvocationContext} for EJB intercepter * * @author Levan * */ public class InvocationContextImpl implements InvocationContext { // Caches methods in order for proceed calls private Queue<Method> methods = new LinkedList<Method>(); private Object[] parameters; // Caches object in order to proceed method calls in chain private Queue<Object> targets = new LinkedList<Object>(); private Map<String, Object> contextData = new HashMap<String, Object>(); private Timer timer; public InvocationContextImpl(Queue<Method> methods, Queue<Object> targets, Object[] parameters) { this.methods = methods; this.targets = targets; this.parameters = parameters; } public InvocationContextImpl(Queue<Method> methods, Queue<Object> targets, Object[] parameters, Timer timer) { this(methods, targets, parameters); this.timer = timer; } @Override public Object getTarget() { Object target = targets.peek(); return target; } @Override public Method getMethod() { return methods.peek(); } @Override public Object[] getParameters() { return parameters; } @Override public void setParameters(Object[] parameters) { this.parameters = parameters; } @Override public Map<String, Object> getContextData() { return contextData; } @Override public Object getTimer() { // TODO find out usage of this method and write implementation return timer; } @Override public Object proceed() throws Exception { Method method = methods.poll(); Object target = targets.poll(); Object value; if (ObjectUtils.notNull(method) && ObjectUtils.notNull(target)) { value = MetaUtils.invokePrivate(method, target, this); } else { value = null; } return value; } }
improved code / comments at utility classes
src/main/java/org/lightmare/ejb/interceptors/InvocationContextImpl.java
improved code / comments at utility classes
<ide><path>rc/main/java/org/lightmare/ejb/interceptors/InvocationContextImpl.java <ide> // Caches methods in order for proceed calls <ide> private Queue<Method> methods = new LinkedList<Method>(); <ide> <add> // Parameters for intercepted method <ide> private Object[] parameters; <ide> <ide> // Caches object in order to proceed method calls in chain
Java
apache-2.0
5a4f1a4d81585ef961763d3764686676c62949da
0
freiheit-com/wicket,mafulafunk/wicket,dashorst/wicket,topicusonderwijs/wicket,selckin/wicket,freiheit-com/wicket,aldaris/wicket,bitstorm/wicket,freiheit-com/wicket,AlienQueen/wicket,topicusonderwijs/wicket,dashorst/wicket,klopfdreh/wicket,bitstorm/wicket,astrapi69/wicket,bitstorm/wicket,dashorst/wicket,aldaris/wicket,selckin/wicket,AlienQueen/wicket,astrapi69/wicket,mosoft521/wicket,freiheit-com/wicket,apache/wicket,dashorst/wicket,freiheit-com/wicket,klopfdreh/wicket,bitstorm/wicket,bitstorm/wicket,aldaris/wicket,apache/wicket,topicusonderwijs/wicket,mosoft521/wicket,mosoft521/wicket,AlienQueen/wicket,apache/wicket,klopfdreh/wicket,mosoft521/wicket,AlienQueen/wicket,mosoft521/wicket,dashorst/wicket,apache/wicket,klopfdreh/wicket,aldaris/wicket,astrapi69/wicket,klopfdreh/wicket,topicusonderwijs/wicket,selckin/wicket,selckin/wicket,mafulafunk/wicket,aldaris/wicket,apache/wicket,mafulafunk/wicket,AlienQueen/wicket,topicusonderwijs/wicket,astrapi69/wicket,selckin/wicket
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.authroles.authorization.strategies.role; import java.util.HashSet; import org.apache.wicket.util.io.IClusterable; import org.apache.wicket.util.string.StringList; /** * Utility class for working with roles. * * @author Eelco Hillenius * @author Jonathan Locke */ public class Roles extends HashSet<String> implements IClusterable { private static final long serialVersionUID = 1L; /** USER role (for use in annotations) */ public static final String USER = "USER"; /** ADMIN role (for use in annotations) */ public static final String ADMIN = "ADMIN"; /** * Construct. */ public Roles() { } /** * Construct. * * @param roles * Roles as a comma separated list, like "ADMIN, USER" */ public Roles(final String roles) { for (final String role : roles.split("\\s*,\\s*")) { add(role); } } /** * Construct. * * @param roles * Roles */ public Roles(final String[] roles) { for (final String role : roles) { add(role); } } /** * Whether this roles object containes the provided role. 
* * @param role * the role to check * @return true if it contains the role, false otherwise */ public boolean hasRole(final String role) { if (role != null) { return contains(role); } return false; } /** * Whether this roles object contains any of the provided roles. * * @param roles * the roles to check * @return true if it contains any of the roles, false otherwise */ public boolean hasAnyRole(Roles roles) { if (roles != null) { for (String role : roles) { if (hasRole(role)) { return true; } } } return false; } /** * Whether this roles object contains all the provided roles. * * @param roles * the roles to check * @return true if it contains all the roles or the provided roles object is null, false * otherwise */ public boolean hasAllRoles(Roles roles) { if (roles != null) { for (String role : roles) { if (!hasRole(role)) { return false; } } } return true; } /** * @see java.lang.Object#toString() */ @Override public String toString() { return StringList.valueOf(this).join(); } }
wicket-auth-roles/src/main/java/org/apache/wicket/authroles/authorization/strategies/role/Roles.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.authroles.authorization.strategies.role; import java.util.HashSet; import org.apache.wicket.util.io.IClusterable; import org.apache.wicket.util.string.StringList; /** * Utility class for working with roles. * * @author Eelco Hillenius * @author Jonathan Locke */ public final class Roles extends HashSet<String> implements IClusterable { private static final long serialVersionUID = 1L; /** USER role (for use in annotations) */ public static final String USER = "USER"; /** ADMIN role (for use in annotations) */ public static final String ADMIN = "ADMIN"; /** * Construct. */ public Roles() { } /** * Construct. * * @param roles * Roles as a comma separated list, like "ADMIN, USER" */ public Roles(final String roles) { for (final String role : roles.split("\\s*,\\s*")) { add(role); } } /** * Construct. * * @param roles * Roles */ public Roles(final String[] roles) { for (final String role : roles) { add(role); } } /** * Whether this roles object containes the provided role. 
* * @param role * the role to check * @return true if it contains the role, false otherwise */ public boolean hasRole(final String role) { if (role != null) { return contains(role); } return false; } /** * Whether this roles object contains any of the provided roles. * * @param roles * the roles to check * @return true if it contains any of the roles, false otherwise */ public boolean hasAnyRole(Roles roles) { if (roles != null) { for (String role : roles) { if (hasRole(role)) { return true; } } } return false; } /** * Whether this roles object contains all the provided roles. * * @param roles * the roles to check * @return true if it contains all the roles or the provided roles object is null, false * otherwise */ public boolean hasAllRoles(Roles roles) { if (roles != null) { for (String role : roles) { if (!hasRole(role)) { return false; } } } return true; } /** * @see java.lang.Object#toString() */ @Override public String toString() { return StringList.valueOf(this).join(); } }
WICKET-5143 Create an interface for the roles replacing the current Roles class Make Roles class non-final
wicket-auth-roles/src/main/java/org/apache/wicket/authroles/authorization/strategies/role/Roles.java
WICKET-5143 Create an interface for the roles replacing the current Roles class
<ide><path>icket-auth-roles/src/main/java/org/apache/wicket/authroles/authorization/strategies/role/Roles.java <ide> * @author Eelco Hillenius <ide> * @author Jonathan Locke <ide> */ <del>public final class Roles extends HashSet<String> implements IClusterable <add>public class Roles extends HashSet<String> implements IClusterable <ide> { <ide> private static final long serialVersionUID = 1L; <ide>
Java
apache-2.0
5cbb81c64f16839f9f853d559e2fffe45b391b42
0
bijukunjummen/spring-boot,bjornlindstrom/spring-boot,tsachev/spring-boot,rajendra-chola/jenkins2-course-spring-boot,mbogoevici/spring-boot,ilayaperumalg/spring-boot,NetoDevel/spring-boot,tsachev/spring-boot,joansmith/spring-boot,thomasdarimont/spring-boot,philwebb/spring-boot,vpavic/spring-boot,donhuvy/spring-boot,neo4j-contrib/spring-boot,mosoft521/spring-boot,lburgazzoli/spring-boot,mrumpf/spring-boot,thomasdarimont/spring-boot,htynkn/spring-boot,olivergierke/spring-boot,javyzheng/spring-boot,bijukunjummen/spring-boot,kamilszymanski/spring-boot,joansmith/spring-boot,jxblum/spring-boot,mbenson/spring-boot,donhuvy/spring-boot,thomasdarimont/spring-boot,lucassaldanha/spring-boot,herau/spring-boot,lucassaldanha/spring-boot,NetoDevel/spring-boot,bclozel/spring-boot,nebhale/spring-boot,royclarkson/spring-boot,jmnarloch/spring-boot,neo4j-contrib/spring-boot,kdvolder/spring-boot,qerub/spring-boot,SaravananParthasarathy/SPSDemo,zhanhb/spring-boot,rajendra-chola/jenkins2-course-spring-boot,isopov/spring-boot,zhanhb/spring-boot,izeye/spring-boot,eddumelendez/spring-boot,michael-simons/spring-boot,lexandro/spring-boot,ilayaperumalg/spring-boot,lucassaldanha/spring-boot,joshiste/spring-boot,philwebb/spring-boot,rweisleder/spring-boot,sebastiankirsch/spring-boot,isopov/spring-boot,dfa1/spring-boot,jmnarloch/spring-boot,rweisleder/spring-boot,wilkinsona/spring-boot,rajendra-chola/jenkins2-course-spring-boot,Nowheresly/spring-boot,bbrouwer/spring-boot,shakuzen/spring-boot,jvz/spring-boot,rajendra-chola/jenkins2-course-spring-boot,DeezCashews/spring-boot,mbenson/spring-boot,RichardCSantana/spring-boot,RichardCSantana/spring-boot,jbovet/spring-boot,jvz/spring-boot,spring-projects/spring-boot,joansmith/spring-boot,philwebb/spring-boot,jbovet/spring-boot,ihoneymon/spring-boot,mbenson/spring-boot,neo4j-contrib/spring-boot,jbovet/spring-boot,bijukunjummen/spring-boot,jxblum/spring-boot,rweisleder/spring-boot,lexandro/spring-boot,chrylis/spring-boot,mbogoevici/spring-boot,zhanhb/spring-
boot,wilkinsona/spring-boot,nebhale/spring-boot,jvz/spring-boot,lexandro/spring-boot,philwebb/spring-boot,Nowheresly/spring-boot,royclarkson/spring-boot,shakuzen/spring-boot,xiaoleiPENG/my-project,shakuzen/spring-boot,yhj630520/spring-boot,scottfrederick/spring-boot,sebastiankirsch/spring-boot,yhj630520/spring-boot,aahlenst/spring-boot,akmaharshi/jenkins,lenicliu/spring-boot,mrumpf/spring-boot,hello2009chen/spring-boot,isopov/spring-boot,aahlenst/spring-boot,jmnarloch/spring-boot,lenicliu/spring-boot,herau/spring-boot,xiaoleiPENG/my-project,hqrt/jenkins2-course-spring-boot,zhangshuangquan/spring-root,eddumelendez/spring-boot,brettwooldridge/spring-boot,zhangshuangquan/spring-root,habuma/spring-boot,akmaharshi/jenkins,Buzzardo/spring-boot,i007422/jenkins2-course-spring-boot,isopov/spring-boot,zhangshuangquan/spring-root,hqrt/jenkins2-course-spring-boot,ameraljovic/spring-boot,drumonii/spring-boot,shangyi0102/spring-boot,shangyi0102/spring-boot,afroje-reshma/spring-boot-sample,Buzzardo/spring-boot,shangyi0102/spring-boot,joansmith/spring-boot,kdvolder/spring-boot,sebastiankirsch/spring-boot,i007422/jenkins2-course-spring-boot,jvz/spring-boot,linead/spring-boot,bjornlindstrom/spring-boot,tiarebalbi/spring-boot,shangyi0102/spring-boot,christian-posta/spring-boot,rajendra-chola/jenkins2-course-spring-boot,sbuettner/spring-boot,ollie314/spring-boot,candrews/spring-boot,aahlenst/spring-boot,spring-projects/spring-boot,NetoDevel/spring-boot,izeye/spring-boot,candrews/spring-boot,neo4j-contrib/spring-boot,lburgazzoli/spring-boot,bbrouwer/spring-boot,sbcoba/spring-boot,cleverjava/jenkins2-course-spring-boot,mdeinum/spring-boot,hello2009chen/spring-boot,srikalyan/spring-boot,philwebb/spring-boot-concourse,jxblum/spring-boot,eddumelendez/spring-boot,javyzheng/spring-boot,vakninr/spring-boot,qerub/spring-boot,RichardCSantana/spring-boot,donhuvy/spring-boot,dreis2211/spring-boot,isopov/spring-boot,scottfrederick/spring-boot,ihoneymon/spring-boot,cleverjava/jenkins2-course-spring-
boot,mdeinum/spring-boot,joshthornhill/spring-boot,philwebb/spring-boot,NetoDevel/spring-boot,bjornlindstrom/spring-boot,wwadge/spring-boot,wwadge/spring-boot,SaravananParthasarathy/SPSDemo,pvorb/spring-boot,nebhale/spring-boot,lucassaldanha/spring-boot,dreis2211/spring-boot,zhanhb/spring-boot,DeezCashews/spring-boot,eddumelendez/spring-boot,ameraljovic/spring-boot,philwebb/spring-boot-concourse,srikalyan/spring-boot,mevasaroj/jenkins2-course-spring-boot,deki/spring-boot,candrews/spring-boot,zhanhb/spring-boot,eddumelendez/spring-boot,philwebb/spring-boot-concourse,mevasaroj/jenkins2-course-spring-boot,joshthornhill/spring-boot,joansmith/spring-boot,kamilszymanski/spring-boot,mdeinum/spring-boot,tiarebalbi/spring-boot,dreis2211/spring-boot,christian-posta/spring-boot,wwadge/spring-boot,donhuvy/spring-boot,i007422/jenkins2-course-spring-boot,wilkinsona/spring-boot,vakninr/spring-boot,htynkn/spring-boot,isopov/spring-boot,cleverjava/jenkins2-course-spring-boot,mevasaroj/jenkins2-course-spring-boot,yangdd1205/spring-boot,drumonii/spring-boot,scottfrederick/spring-boot,thomasdarimont/spring-boot,ollie314/spring-boot,donhuvy/spring-boot,olivergierke/spring-boot,felipeg48/spring-boot,ameraljovic/spring-boot,bclozel/spring-boot,bijukunjummen/spring-boot,htynkn/spring-boot,linead/spring-boot,mosoft521/spring-boot,brettwooldridge/spring-boot,vpavic/spring-boot,joshiste/spring-boot,Nowheresly/spring-boot,joshthornhill/spring-boot,christian-posta/spring-boot,hqrt/jenkins2-course-spring-boot,i007422/jenkins2-course-spring-boot,herau/spring-boot,wilkinsona/spring-boot,brettwooldridge/spring-boot,bclozel/spring-boot,linead/spring-boot,vakninr/spring-boot,SaravananParthasarathy/SPSDemo,Buzzardo/spring-boot,minmay/spring-boot,aahlenst/spring-boot,Buzzardo/spring-boot,ptahchiev/spring-boot,shangyi0102/spring-boot,qerub/spring-boot,xiaoleiPENG/my-project,eddumelendez/spring-boot,bijukunjummen/spring-boot,yhj630520/spring-boot,hqrt/jenkins2-course-spring-boot,philwebb/spring-boot-conc
ourse,joshiste/spring-boot,pvorb/spring-boot,deki/spring-boot,philwebb/spring-boot-concourse,ptahchiev/spring-boot,michael-simons/spring-boot,ollie314/spring-boot,kamilszymanski/spring-boot,linead/spring-boot,tiarebalbi/spring-boot,dreis2211/spring-boot,drumonii/spring-boot,thomasdarimont/spring-boot,dreis2211/spring-boot,htynkn/spring-boot,izeye/spring-boot,spring-projects/spring-boot,brettwooldridge/spring-boot,chrylis/spring-boot,philwebb/spring-boot,chrylis/spring-boot,RichardCSantana/spring-boot,lburgazzoli/spring-boot,sbuettner/spring-boot,nebhale/spring-boot,joshthornhill/spring-boot,zhangshuangquan/spring-root,habuma/spring-boot,yhj630520/spring-boot,jvz/spring-boot,dfa1/spring-boot,ihoneymon/spring-boot,ihoneymon/spring-boot,afroje-reshma/spring-boot-sample,mosoft521/spring-boot,yangdd1205/spring-boot,cleverjava/jenkins2-course-spring-boot,chrylis/spring-boot,michael-simons/spring-boot,qerub/spring-boot,neo4j-contrib/spring-boot,jxblum/spring-boot,spring-projects/spring-boot,chrylis/spring-boot,akmaharshi/jenkins,jayarampradhan/spring-boot,bbrouwer/spring-boot,akmaharshi/jenkins,lenicliu/spring-boot,spring-projects/spring-boot,bclozel/spring-boot,sbuettner/spring-boot,lenicliu/spring-boot,lucassaldanha/spring-boot,mosoft521/spring-boot,minmay/spring-boot,mbenson/spring-boot,hello2009chen/spring-boot,sebastiankirsch/spring-boot,kdvolder/spring-boot,habuma/spring-boot,sbcoba/spring-boot,wwadge/spring-boot,drumonii/spring-boot,joshiste/spring-boot,scottfrederick/spring-boot,afroje-reshma/spring-boot-sample,royclarkson/spring-boot,zhanhb/spring-boot,ollie314/spring-boot,qerub/spring-boot,xiaoleiPENG/my-project,kamilszymanski/spring-boot,mbogoevici/spring-boot,kdvolder/spring-boot,linead/spring-boot,pvorb/spring-boot,deki/spring-boot,javyzheng/spring-boot,michael-simons/spring-boot,felipeg48/spring-boot,vpavic/spring-boot,sbuettner/spring-boot,mdeinum/spring-boot,habuma/spring-boot,SaravananParthasarathy/SPSDemo,candrews/spring-boot,shakuzen/spring-boot,minmay/s
pring-boot,srikalyan/spring-boot,DeezCashews/spring-boot,felipeg48/spring-boot,shakuzen/spring-boot,izeye/spring-boot,RichardCSantana/spring-boot,dfa1/spring-boot,ptahchiev/spring-boot,izeye/spring-boot,jxblum/spring-boot,ameraljovic/spring-boot,Buzzardo/spring-boot,yhj630520/spring-boot,bjornlindstrom/spring-boot,joshiste/spring-boot,tiarebalbi/spring-boot,wwadge/spring-boot,rweisleder/spring-boot,bjornlindstrom/spring-boot,vpavic/spring-boot,mdeinum/spring-boot,brettwooldridge/spring-boot,habuma/spring-boot,srikalyan/spring-boot,mbogoevici/spring-boot,jbovet/spring-boot,dfa1/spring-boot,drumonii/spring-boot,sebastiankirsch/spring-boot,ollie314/spring-boot,royclarkson/spring-boot,habuma/spring-boot,tiarebalbi/spring-boot,minmay/spring-boot,ilayaperumalg/spring-boot,jayarampradhan/spring-boot,sbuettner/spring-boot,bbrouwer/spring-boot,lenicliu/spring-boot,pvorb/spring-boot,ameraljovic/spring-boot,herau/spring-boot,mosoft521/spring-boot,scottfrederick/spring-boot,xiaoleiPENG/my-project,ihoneymon/spring-boot,drumonii/spring-boot,rweisleder/spring-boot,ptahchiev/spring-boot,hello2009chen/spring-boot,mrumpf/spring-boot,felipeg48/spring-boot,wilkinsona/spring-boot,lexandro/spring-boot,ilayaperumalg/spring-boot,sbcoba/spring-boot,srikalyan/spring-boot,tsachev/spring-boot,jmnarloch/spring-boot,pvorb/spring-boot,dreis2211/spring-boot,lexandro/spring-boot,yangdd1205/spring-boot,vakninr/spring-boot,tsachev/spring-boot,bclozel/spring-boot,javyzheng/spring-boot,SaravananParthasarathy/SPSDemo,DeezCashews/spring-boot,deki/spring-boot,kamilszymanski/spring-boot,NetoDevel/spring-boot,hello2009chen/spring-boot,sbcoba/spring-boot,jayarampradhan/spring-boot,candrews/spring-boot,royclarkson/spring-boot,mrumpf/spring-boot,DeezCashews/spring-boot,rweisleder/spring-boot,ptahchiev/spring-boot,scottfrederick/spring-boot,vpavic/spring-boot,kdvolder/spring-boot,kdvolder/spring-boot,vakninr/spring-boot,ilayaperumalg/spring-boot,tiarebalbi/spring-boot,felipeg48/spring-boot,joshthornhill/spring-
boot,aahlenst/spring-boot,jmnarloch/spring-boot,michael-simons/spring-boot,mrumpf/spring-boot,dfa1/spring-boot,tsachev/spring-boot,spring-projects/spring-boot,michael-simons/spring-boot,mbenson/spring-boot,mdeinum/spring-boot,christian-posta/spring-boot,afroje-reshma/spring-boot-sample,sbcoba/spring-boot,ihoneymon/spring-boot,Buzzardo/spring-boot,ptahchiev/spring-boot,minmay/spring-boot,hqrt/jenkins2-course-spring-boot,herau/spring-boot,jbovet/spring-boot,ilayaperumalg/spring-boot,deki/spring-boot,donhuvy/spring-boot,joshiste/spring-boot,wilkinsona/spring-boot,afroje-reshma/spring-boot-sample,htynkn/spring-boot,mbenson/spring-boot,lburgazzoli/spring-boot,bbrouwer/spring-boot,jayarampradhan/spring-boot,mbogoevici/spring-boot,jayarampradhan/spring-boot,nebhale/spring-boot,chrylis/spring-boot,htynkn/spring-boot,jxblum/spring-boot,felipeg48/spring-boot,olivergierke/spring-boot,javyzheng/spring-boot,lburgazzoli/spring-boot,vpavic/spring-boot,aahlenst/spring-boot,mevasaroj/jenkins2-course-spring-boot,zhangshuangquan/spring-root,i007422/jenkins2-course-spring-boot,olivergierke/spring-boot,olivergierke/spring-boot,cleverjava/jenkins2-course-spring-boot,Nowheresly/spring-boot,Nowheresly/spring-boot,bclozel/spring-boot,mevasaroj/jenkins2-course-spring-boot,akmaharshi/jenkins,christian-posta/spring-boot,shakuzen/spring-boot,tsachev/spring-boot
/* * Copyright 2012-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.actuate.autoconfigure; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import javax.annotation.PostConstruct; import javax.servlet.http.HttpServletRequest; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.actuate.endpoint.Endpoint; import org.springframework.boot.actuate.endpoint.mvc.EndpointHandlerMapping; import org.springframework.boot.actuate.endpoint.mvc.MvcEndpoint; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.AutoConfigureBefore; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionOutcome; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication; import org.springframework.boot.autoconfigure.condition.SpringBootCondition; import org.springframework.boot.autoconfigure.security.AuthenticationManagerConfiguration; import 
org.springframework.boot.autoconfigure.security.FallbackWebSecurityAutoConfiguration; import org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration; import org.springframework.boot.autoconfigure.security.SecurityPrerequisite; import org.springframework.boot.autoconfigure.security.SecurityProperties; import org.springframework.boot.autoconfigure.security.SpringBootWebSecurityConfiguration; import org.springframework.boot.autoconfigure.web.ErrorController; import org.springframework.boot.autoconfigure.web.ServerProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ConditionContext; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; import org.springframework.core.annotation.Order; import org.springframework.core.type.AnnotatedTypeMetadata; import org.springframework.security.config.annotation.web.WebSecurityConfigurer; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.builders.WebSecurity; import org.springframework.security.config.annotation.web.builders.WebSecurity.IgnoredRequestConfigurer; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfiguration; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer; import org.springframework.security.web.AuthenticationEntryPoint; import org.springframework.security.web.authentication.www.BasicAuthenticationEntryPoint; import 
org.springframework.security.web.util.matcher.AntPathRequestMatcher; import org.springframework.security.web.util.matcher.AnyRequestMatcher; import org.springframework.security.web.util.matcher.OrRequestMatcher; import org.springframework.security.web.util.matcher.RequestMatcher; import org.springframework.util.StringUtils; /** * {@link EnableAutoConfiguration Auto-configuration} for security of framework endpoints. * Many aspects of the behavior can be controller with {@link ManagementServerProperties} * via externalized application properties (or via an bean definition of that type to set * the defaults). * <p> * The framework {@link Endpoint}s (used to expose application information to operations) * include a {@link Endpoint#isSensitive() sensitive} configuration option which will be * used as a security hint by the filter created here. * * @author Dave Syer * @author Andy Wilkinson */ @Configuration @ConditionalOnWebApplication @ConditionalOnClass({ EnableWebSecurity.class }) @AutoConfigureAfter(SecurityAutoConfiguration.class) @AutoConfigureBefore(FallbackWebSecurityAutoConfiguration.class) @EnableConfigurationProperties public class ManagementWebSecurityAutoConfiguration { private static final String[] NO_PATHS = new String[0]; @Bean @ConditionalOnMissingBean({ IgnoredPathsWebSecurityConfigurerAdapter.class }) public IgnoredPathsWebSecurityConfigurerAdapter ignoredPathsWebSecurityConfigurerAdapter() { return new IgnoredPathsWebSecurityConfigurerAdapter(); } @Configuration protected static class ManagementSecurityPropertiesConfiguration implements SecurityPrerequisite { @Autowired(required = false) private SecurityProperties security; @Autowired(required = false) private ManagementServerProperties management; @PostConstruct public void init() { if (this.management != null && this.security != null) { this.security.getUser().getRole() .add(this.management.getSecurity().getRole()); } } } // Get the ignored paths in early @Order(SecurityProperties.IGNORED_ORDER + 
1) private static class IgnoredPathsWebSecurityConfigurerAdapter implements WebSecurityConfigurer<WebSecurity> { @Autowired(required = false) private ErrorController errorController; @Autowired(required = false) private EndpointHandlerMapping endpointHandlerMapping; @Autowired private ManagementServerProperties management; @Autowired private SecurityProperties security; @Autowired(required = false) private ServerProperties server; @Override public void configure(WebSecurity builder) throws Exception { } @Override public void init(WebSecurity builder) throws Exception { IgnoredRequestConfigurer ignoring = builder.ignoring(); // The ignores are not cumulative, so to prevent overwriting the defaults we // add them back. List<String> ignored = SpringBootWebSecurityConfiguration .getIgnored(this.security); if (!this.management.getSecurity().isEnabled()) { ignored.addAll(Arrays.asList(EndpointPaths.ALL .getPaths(this.endpointHandlerMapping))); } if (ignored.contains("none")) { ignored.remove("none"); } if (this.errorController != null) { ignored.add(normalizePath(this.errorController.getErrorPath())); } if (this.server != null) { String[] paths = this.server.getPathsArray(ignored); ignoring.antMatchers(paths); } } private String normalizePath(String errorPath) { String result = StringUtils.cleanPath(errorPath); if (!result.startsWith("/")) { result = "/" + result; } return result; } } @Configuration @ConditionalOnMissingBean(WebSecurityConfiguration.class) @Conditional(WebSecurityEnablerCondition.class) @EnableWebSecurity protected static class WebSecurityEnabler extends AuthenticationManagerConfiguration { } /** * WebSecurityEnabler condition. 
*/ static class WebSecurityEnablerCondition extends SpringBootCondition { @Override public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) { String managementEnabled = context.getEnvironment().getProperty( "management.security.enabled", "true"); String basicEnabled = context.getEnvironment().getProperty( "security.basic.enabled", "true"); return new ConditionOutcome("true".equalsIgnoreCase(managementEnabled) && !"true".equalsIgnoreCase(basicEnabled), "Management security enabled and basic disabled"); } } @Configuration @ConditionalOnMissingBean({ ManagementWebSecurityConfigurerAdapter.class }) @ConditionalOnProperty(prefix = "management.security", name = "enabled", matchIfMissing = true) @Order(ManagementServerProperties.BASIC_AUTH_ORDER) protected static class ManagementWebSecurityConfigurerAdapter extends WebSecurityConfigurerAdapter { @Autowired private SecurityProperties security; @Autowired private ManagementServerProperties management; @Autowired(required = false) private ManagementContextResolver contextResolver; @Autowired(required = false) private ServerProperties server; @Autowired(required = false) private EndpointHandlerMapping endpointHandlerMapping; public void setEndpointHandlerMapping( EndpointHandlerMapping endpointHandlerMapping) { this.endpointHandlerMapping = endpointHandlerMapping; } protected final EndpointHandlerMapping getRequiredEndpointHandlerMapping() { if (this.endpointHandlerMapping == null) { ApplicationContext context = (this.contextResolver == null ? 
null : this.contextResolver.getApplicationContext()); if (context != null && context.getBeanNamesForType(EndpointHandlerMapping.class).length > 0) { this.endpointHandlerMapping = context .getBean(EndpointHandlerMapping.class); } if (this.endpointHandlerMapping == null) { this.endpointHandlerMapping = new EndpointHandlerMapping( Collections.<MvcEndpoint>emptySet()); } } return this.endpointHandlerMapping; } @Override protected void configure(HttpSecurity http) throws Exception { // secure endpoints RequestMatcher matcher = getRequestMatcher(); if (matcher != null) { // Always protect them if present if (this.security.isRequireSsl()) { http.requiresChannel().anyRequest().requiresSecure(); } AuthenticationEntryPoint entryPoint = entryPoint(); http.exceptionHandling().authenticationEntryPoint(entryPoint); // Match all the requests for actuator endpoints ... http.requestMatcher(matcher); // ... but permitAll() for the non-sensitive ones configurePermittedRequests(http.authorizeRequests()); http.httpBasic().authenticationEntryPoint(entryPoint); // No cookies for management endpoints by default http.csrf().disable(); http.sessionManagement().sessionCreationPolicy( this.management.getSecurity().getSessions()); SpringBootWebSecurityConfiguration.configureHeaders(http.headers(), this.security.getHeaders()); } } private RequestMatcher getRequestMatcher() { if (!this.management.getSecurity().isEnabled()) { return null; } String path = this.management.getContextPath(); if (StringUtils.hasText(path)) { AntPathRequestMatcher matcher = new AntPathRequestMatcher( this.server.getPath(path) + "/**"); return matcher; } // Match everything, including the sensitive and non-sensitive paths return new EndpointPathRequestMatcher(EndpointPaths.ALL); } private AuthenticationEntryPoint entryPoint() { BasicAuthenticationEntryPoint entryPoint = new BasicAuthenticationEntryPoint(); entryPoint.setRealmName(this.security.getBasic().getRealm()); return entryPoint; } private void 
configurePermittedRequests( ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry requests) { // Permit access to the non-sensitive endpoints requests.requestMatchers( new EndpointPathRequestMatcher(EndpointPaths.NON_SENSITIVE)) .permitAll(); // Restrict the rest to the configured role requests.anyRequest().hasRole(this.management.getSecurity().getRole()); } private final class EndpointPathRequestMatcher implements RequestMatcher { private final EndpointPaths endpointPaths; private RequestMatcher delegate; EndpointPathRequestMatcher(EndpointPaths endpointPaths) { this.endpointPaths = endpointPaths; } @Override public boolean matches(HttpServletRequest request) { if (this.delegate == null) { this.delegate = createDelegate(); } return this.delegate.matches(request); } private RequestMatcher createDelegate() { ServerProperties server = ManagementWebSecurityConfigurerAdapter.this.server; List<RequestMatcher> matchers = new ArrayList<RequestMatcher>(); EndpointHandlerMapping endpointHandlerMapping = ManagementWebSecurityConfigurerAdapter.this .getRequiredEndpointHandlerMapping(); for (String path : this.endpointPaths.getPaths(endpointHandlerMapping)) { matchers.add(new AntPathRequestMatcher(server.getPath(path))); } return (matchers.isEmpty() ? AnyRequestMatcher.INSTANCE : new OrRequestMatcher(matchers)); } } } private enum EndpointPaths { ALL, NON_SENSITIVE { @Override protected boolean isIncluded(MvcEndpoint endpoint) { return !endpoint.isSensitive(); } }; public String[] getPaths(EndpointHandlerMapping endpointHandlerMapping) { if (endpointHandlerMapping == null) { return NO_PATHS; } Set<? 
extends MvcEndpoint> endpoints = endpointHandlerMapping.getEndpoints(); Set<String> paths = new LinkedHashSet<String>(endpoints.size()); for (MvcEndpoint endpoint : endpoints) { if (isIncluded(endpoint)) { String path = endpointHandlerMapping.getPath(endpoint.getPath()); paths.add(path); if (!path.equals("")) { if (endpoint.isSensitive()) { // Ensure that nested paths are secured paths.add(path + "/**"); // Add Spring MVC-generated additional paths paths.add(path + ".*"); } } paths.add(path + "/"); } } return paths.toArray(new String[paths.size()]); } protected boolean isIncluded(MvcEndpoint endpoint) { return true; } } }
spring-boot-actuator/src/main/java/org/springframework/boot/actuate/autoconfigure/ManagementWebSecurityAutoConfiguration.java
/* * Copyright 2012-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.actuate.autoconfigure; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import javax.annotation.PostConstruct; import javax.servlet.http.HttpServletRequest; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.actuate.endpoint.Endpoint; import org.springframework.boot.actuate.endpoint.mvc.EndpointHandlerMapping; import org.springframework.boot.actuate.endpoint.mvc.MvcEndpoint; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.AutoConfigureBefore; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionOutcome; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication; import org.springframework.boot.autoconfigure.condition.SpringBootCondition; import org.springframework.boot.autoconfigure.security.AuthenticationManagerConfiguration; import 
org.springframework.boot.autoconfigure.security.FallbackWebSecurityAutoConfiguration; import org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration; import org.springframework.boot.autoconfigure.security.SecurityPrerequisite; import org.springframework.boot.autoconfigure.security.SecurityProperties; import org.springframework.boot.autoconfigure.security.SpringBootWebSecurityConfiguration; import org.springframework.boot.autoconfigure.web.ErrorController; import org.springframework.boot.autoconfigure.web.ServerProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ConditionContext; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; import org.springframework.core.annotation.Order; import org.springframework.core.type.AnnotatedTypeMetadata; import org.springframework.security.config.annotation.web.WebSecurityConfigurer; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.builders.WebSecurity; import org.springframework.security.config.annotation.web.builders.WebSecurity.IgnoredRequestConfigurer; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfiguration; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer; import org.springframework.security.web.AuthenticationEntryPoint; import org.springframework.security.web.authentication.www.BasicAuthenticationEntryPoint; import 
org.springframework.security.web.util.matcher.AntPathRequestMatcher; import org.springframework.security.web.util.matcher.AnyRequestMatcher; import org.springframework.security.web.util.matcher.OrRequestMatcher; import org.springframework.security.web.util.matcher.RequestMatcher; import org.springframework.util.StringUtils; /** * {@link EnableAutoConfiguration Auto-configuration} for security of framework endpoints. * Many aspects of the behavior can be controller with {@link ManagementServerProperties} * via externalized application properties (or via an bean definition of that type to set * the defaults). * <p> * The framework {@link Endpoint}s (used to expose application information to operations) * include a {@link Endpoint#isSensitive() sensitive} configuration option which will be * used as a security hint by the filter created here. * * @author Dave Syer * @author Andy Wilkinson */ @Configuration @ConditionalOnWebApplication @ConditionalOnClass({ EnableWebSecurity.class }) @AutoConfigureAfter(SecurityAutoConfiguration.class) @AutoConfigureBefore(FallbackWebSecurityAutoConfiguration.class) @EnableConfigurationProperties public class ManagementWebSecurityAutoConfiguration { private static final String[] NO_PATHS = new String[0]; @Bean @ConditionalOnMissingBean({ IgnoredPathsWebSecurityConfigurerAdapter.class }) public IgnoredPathsWebSecurityConfigurerAdapter ignoredPathsWebSecurityConfigurerAdapter() { return new IgnoredPathsWebSecurityConfigurerAdapter(); } @Configuration protected static class ManagementSecurityPropertiesConfiguration implements SecurityPrerequisite { @Autowired(required = false) private SecurityProperties security; @Autowired(required = false) private ManagementServerProperties management; @PostConstruct public void init() { if (this.management != null && this.security != null) { this.security.getUser().getRole() .add(this.management.getSecurity().getRole()); } } } // Get the ignored paths in early @Order(SecurityProperties.IGNORED_ORDER + 
1) private static class IgnoredPathsWebSecurityConfigurerAdapter implements WebSecurityConfigurer<WebSecurity> { @Autowired(required = false) private ErrorController errorController; @Autowired(required = false) private EndpointHandlerMapping endpointHandlerMapping; @Autowired private ManagementServerProperties management; @Autowired private SecurityProperties security; @Autowired(required = false) private ServerProperties server; @Override public void configure(WebSecurity builder) throws Exception { } @Override public void init(WebSecurity builder) throws Exception { IgnoredRequestConfigurer ignoring = builder.ignoring(); // The ignores are not cumulative, so to prevent overwriting the defaults we // add them back. List<String> ignored = SpringBootWebSecurityConfiguration .getIgnored(this.security); if (!this.management.getSecurity().isEnabled()) { ignored.addAll(Arrays.asList(EndpointPaths .get(this.endpointHandlerMapping))); } if (ignored.contains("none")) { ignored.remove("none"); } if (this.errorController != null) { ignored.add(normalizePath(this.errorController.getErrorPath())); } if (this.server != null) { String[] paths = this.server.getPathsArray(ignored); ignoring.antMatchers(paths); } } private String normalizePath(String errorPath) { String result = StringUtils.cleanPath(errorPath); if (!result.startsWith("/")) { result = "/" + result; } return result; } } @Configuration @ConditionalOnMissingBean(WebSecurityConfiguration.class) @Conditional(WebSecurityEnablerCondition.class) @EnableWebSecurity protected static class WebSecurityEnabler extends AuthenticationManagerConfiguration { } /** * WebSecurityEnabler condition. 
*/ static class WebSecurityEnablerCondition extends SpringBootCondition { @Override public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) { String managementEnabled = context.getEnvironment().getProperty( "management.security.enabled", "true"); String basicEnabled = context.getEnvironment().getProperty( "security.basic.enabled", "true"); return new ConditionOutcome("true".equalsIgnoreCase(managementEnabled) && !"true".equalsIgnoreCase(basicEnabled), "Management security enabled and basic disabled"); } } @Configuration @ConditionalOnMissingBean({ ManagementWebSecurityConfigurerAdapter.class }) @ConditionalOnProperty(prefix = "management.security", name = "enabled", matchIfMissing = true) @Order(ManagementServerProperties.BASIC_AUTH_ORDER) protected static class ManagementWebSecurityConfigurerAdapter extends WebSecurityConfigurerAdapter { @Autowired private SecurityProperties security; @Autowired private ManagementServerProperties management; @Autowired(required = false) private ManagementContextResolver contextResolver; @Autowired(required = false) private ServerProperties server; @Autowired(required = false) private EndpointHandlerMapping endpointHandlerMapping; public void setEndpointHandlerMapping( EndpointHandlerMapping endpointHandlerMapping) { this.endpointHandlerMapping = endpointHandlerMapping; } protected final void deduceEndpointHandlerMappingIfMissing() { if (this.endpointHandlerMapping == null) { ApplicationContext context = (this.contextResolver == null ? 
null : this.contextResolver.getApplicationContext()); if (context != null && context.getBeanNamesForType(EndpointHandlerMapping.class).length > 0) { this.endpointHandlerMapping = context .getBean(EndpointHandlerMapping.class); } if (this.endpointHandlerMapping == null) { this.endpointHandlerMapping = new EndpointHandlerMapping( Collections.<MvcEndpoint>emptySet()); } } } @Override protected void configure(HttpSecurity http) throws Exception { // secure endpoints RequestMatcher matcher = getRequestMatcher(); if (matcher != null) { // Always protect them if present if (this.security.isRequireSsl()) { http.requiresChannel().anyRequest().requiresSecure(); } AuthenticationEntryPoint entryPoint = entryPoint(); http.exceptionHandling().authenticationEntryPoint(entryPoint); // Match all the requests for actuator endpoints ... http.requestMatcher(matcher); // ... but permitAll() for the non-sensitive ones configurePermittedRequests(http.authorizeRequests()); http.httpBasic().authenticationEntryPoint(entryPoint); // No cookies for management endpoints by default http.csrf().disable(); http.sessionManagement().sessionCreationPolicy( this.management.getSecurity().getSessions()); SpringBootWebSecurityConfiguration.configureHeaders(http.headers(), this.security.getHeaders()); } } private RequestMatcher getRequestMatcher() { if (!this.management.getSecurity().isEnabled()) { return null; } String path = this.management.getContextPath(); if (StringUtils.hasText(path)) { AntPathRequestMatcher matcher = new AntPathRequestMatcher( this.server.getPath(path) + "/**"); return matcher; } // Match everything, including the sensitive and non-sensitive paths return new EndpointPathRequestMatcher( EndpointPaths.get(this.endpointHandlerMapping)); } private AuthenticationEntryPoint entryPoint() { BasicAuthenticationEntryPoint entryPoint = new BasicAuthenticationEntryPoint(); entryPoint.setRealmName(this.security.getBasic().getRealm()); return entryPoint; } private void 
configurePermittedRequests( ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry requests) { // Permit access to the non-sensitive endpoints requests.requestMatchers( new EndpointPathRequestMatcher(EndpointPaths.get( this.endpointHandlerMapping, false))).permitAll(); // Restrict the rest to the configured role requests.anyRequest().hasRole(this.management.getSecurity().getRole()); } private final class EndpointPathRequestMatcher implements RequestMatcher { private RequestMatcher delegate; private String[] paths; EndpointPathRequestMatcher(String[] paths) { this.paths = paths; } @Override public boolean matches(HttpServletRequest request) { ManagementWebSecurityConfigurerAdapter.this .deduceEndpointHandlerMappingIfMissing(); if (this.delegate == null) { this.delegate = createDelegate(); } return this.delegate.matches(request); } private RequestMatcher createDelegate() { ServerProperties server = ManagementWebSecurityConfigurerAdapter.this.server; List<RequestMatcher> matchers = new ArrayList<RequestMatcher>(); for (String path : this.paths) { matchers.add(new AntPathRequestMatcher(server.getPath(path))); } return (matchers.isEmpty() ? AnyRequestMatcher.INSTANCE : new OrRequestMatcher(matchers)); } } } /** * Helper class for extracting lists of paths from the EndpointHandlerMapping. */ private static class EndpointPaths { /** * Get all the paths (sensitive and unsensitive). * * @param endpointHandlerMapping the mapping * @return all paths */ public static String[] get(EndpointHandlerMapping endpointHandlerMapping) { String[] insecure = get(endpointHandlerMapping, false); String[] secure = get(endpointHandlerMapping, true); return StringUtils.mergeStringArrays(insecure, secure); } /** * Get all the paths that are either sensitive or unsensitive. 
* * @param endpointHandlerMapping the mapping * @param secure flag to say if we want the secure ones * @return the relevant paths */ public static String[] get(EndpointHandlerMapping endpointHandlerMapping, boolean secure) { if (endpointHandlerMapping == null) { return NO_PATHS; } Set<? extends MvcEndpoint> endpoints = endpointHandlerMapping.getEndpoints(); Set<String> paths = new LinkedHashSet<String>(endpoints.size()); for (MvcEndpoint endpoint : endpoints) { if (endpoint.isSensitive() == secure) { String path = endpointHandlerMapping.getPath(endpoint.getPath()); paths.add(path); if (!path.equals("")) { if (secure) { // Ensure that nested paths are secured paths.add(path + "/**"); // Add Spring MVC-generated additional paths paths.add(path + ".*"); } } paths.add(path + "/"); } } return paths.toArray(new String[paths.size()]); } } }
Fix management security when using different port Update ManagementWebSecurityAutoConfiguration to reinstate lazy creation of EndpointHandlerMapping from the EndpointPathRequestMatcher. Fixes a regression introduced in eb2984781 and picked up my one of the sample integration tests. Fixes gh-4059
spring-boot-actuator/src/main/java/org/springframework/boot/actuate/autoconfigure/ManagementWebSecurityAutoConfiguration.java
Fix management security when using different port
<ide><path>pring-boot-actuator/src/main/java/org/springframework/boot/actuate/autoconfigure/ManagementWebSecurityAutoConfiguration.java <ide> List<String> ignored = SpringBootWebSecurityConfiguration <ide> .getIgnored(this.security); <ide> if (!this.management.getSecurity().isEnabled()) { <del> ignored.addAll(Arrays.asList(EndpointPaths <del> .get(this.endpointHandlerMapping))); <add> ignored.addAll(Arrays.asList(EndpointPaths.ALL <add> .getPaths(this.endpointHandlerMapping))); <ide> } <ide> if (ignored.contains("none")) { <ide> ignored.remove("none"); <ide> this.endpointHandlerMapping = endpointHandlerMapping; <ide> } <ide> <del> protected final void deduceEndpointHandlerMappingIfMissing() { <add> protected final EndpointHandlerMapping getRequiredEndpointHandlerMapping() { <ide> if (this.endpointHandlerMapping == null) { <ide> ApplicationContext context = (this.contextResolver == null ? null <ide> : this.contextResolver.getApplicationContext()); <ide> Collections.<MvcEndpoint>emptySet()); <ide> } <ide> } <add> return this.endpointHandlerMapping; <ide> } <ide> <ide> @Override <ide> return matcher; <ide> } <ide> // Match everything, including the sensitive and non-sensitive paths <del> return new EndpointPathRequestMatcher( <del> EndpointPaths.get(this.endpointHandlerMapping)); <add> return new EndpointPathRequestMatcher(EndpointPaths.ALL); <ide> } <ide> <ide> private AuthenticationEntryPoint entryPoint() { <ide> ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry requests) { <ide> // Permit access to the non-sensitive endpoints <ide> requests.requestMatchers( <del> new EndpointPathRequestMatcher(EndpointPaths.get( <del> this.endpointHandlerMapping, false))).permitAll(); <add> new EndpointPathRequestMatcher(EndpointPaths.NON_SENSITIVE)) <add> .permitAll(); <ide> // Restrict the rest to the configured role <ide> requests.anyRequest().hasRole(this.management.getSecurity().getRole()); <ide> } <ide> <ide> private final class 
EndpointPathRequestMatcher implements RequestMatcher { <ide> <add> private final EndpointPaths endpointPaths; <add> <ide> private RequestMatcher delegate; <ide> <del> private String[] paths; <del> <del> EndpointPathRequestMatcher(String[] paths) { <del> this.paths = paths; <add> EndpointPathRequestMatcher(EndpointPaths endpointPaths) { <add> this.endpointPaths = endpointPaths; <ide> } <ide> <ide> @Override <ide> public boolean matches(HttpServletRequest request) { <del> ManagementWebSecurityConfigurerAdapter.this <del> .deduceEndpointHandlerMappingIfMissing(); <ide> if (this.delegate == null) { <ide> this.delegate = createDelegate(); <ide> } <ide> private RequestMatcher createDelegate() { <ide> ServerProperties server = ManagementWebSecurityConfigurerAdapter.this.server; <ide> List<RequestMatcher> matchers = new ArrayList<RequestMatcher>(); <del> for (String path : this.paths) { <add> EndpointHandlerMapping endpointHandlerMapping = ManagementWebSecurityConfigurerAdapter.this <add> .getRequiredEndpointHandlerMapping(); <add> for (String path : this.endpointPaths.getPaths(endpointHandlerMapping)) { <ide> matchers.add(new AntPathRequestMatcher(server.getPath(path))); <ide> } <ide> return (matchers.isEmpty() ? AnyRequestMatcher.INSTANCE <ide> <ide> } <ide> <del> /** <del> * Helper class for extracting lists of paths from the EndpointHandlerMapping. <del> */ <del> private static class EndpointPaths { <del> <del> /** <del> * Get all the paths (sensitive and unsensitive). <del> * <del> * @param endpointHandlerMapping the mapping <del> * @return all paths <del> */ <del> public static String[] get(EndpointHandlerMapping endpointHandlerMapping) { <del> String[] insecure = get(endpointHandlerMapping, false); <del> String[] secure = get(endpointHandlerMapping, true); <del> return StringUtils.mergeStringArrays(insecure, secure); <del> } <del> <del> /** <del> * Get all the paths that are either sensitive or unsensitive. 
<del> * <del> * @param endpointHandlerMapping the mapping <del> * @param secure flag to say if we want the secure ones <del> * @return the relevant paths <del> */ <del> public static String[] get(EndpointHandlerMapping endpointHandlerMapping, <del> boolean secure) { <add> private enum EndpointPaths { <add> <add> ALL, <add> <add> NON_SENSITIVE { <add> @Override <add> protected boolean isIncluded(MvcEndpoint endpoint) { <add> return !endpoint.isSensitive(); <add> } <add> }; <add> <add> public String[] getPaths(EndpointHandlerMapping endpointHandlerMapping) { <ide> if (endpointHandlerMapping == null) { <ide> return NO_PATHS; <ide> } <ide> Set<? extends MvcEndpoint> endpoints = endpointHandlerMapping.getEndpoints(); <ide> Set<String> paths = new LinkedHashSet<String>(endpoints.size()); <ide> for (MvcEndpoint endpoint : endpoints) { <del> if (endpoint.isSensitive() == secure) { <add> if (isIncluded(endpoint)) { <ide> String path = endpointHandlerMapping.getPath(endpoint.getPath()); <ide> paths.add(path); <ide> if (!path.equals("")) { <del> if (secure) { <add> if (endpoint.isSensitive()) { <ide> // Ensure that nested paths are secured <ide> paths.add(path + "/**"); <ide> // Add Spring MVC-generated additional paths <ide> return paths.toArray(new String[paths.size()]); <ide> } <ide> <add> protected boolean isIncluded(MvcEndpoint endpoint) { <add> return true; <add> } <add> <ide> } <ide> <ide> }
JavaScript
mit
0d0f0c1e4ad285bada0f4bdcbd4eeb6e510980c1
0
DemocraciaEnRed/democraciaenred.github.io
import React, { Component } from 'react' import ThumbnailSlider from '../components/ThumbnailSlider' let Flickity; export default class extends Component { constructor(props) { super(props) this.flickity = null } componentDidMount () { Flickity = require('flickity') } componentDidUpdate () { if (this.flkty) this.flkty.destroy() const options = { cellCelector: '.thumbnail-item', pageDots: false, wrapAround: false, cellAlign: 'center', draggable: true, contain: true, prevNextButtons: true } this.flkty = new Flickity(this.refs.carousel, options) } componentWillUnmount () { this.flkty.destroy() } render () { return ( <div className='thumbnails-container' ref='carousel'> {this.props.videos.map((video) => <ThumbnailSlider id={video.id} thumbnail={video.thumbnail} title={video.title} key={video.id} handleClick={this.props.handleClick} /> )} <style jsx>{` .thumbnails-container { width: 800px; } `}</style> </div> ) } }
sections/about/containers/SliderContainer.js
import React, { Component } from 'react' import ThumbnailSlider from '../components/ThumbnailSlider' let Flickity; export default class extends Component { constructor(props) { super(props) this.flickity = null } componentDidMount () { Flickity = require('flickity') } componentDidUpdate () { if (this.flkty) this.flkty.destroy() const options = { cellCelector: '.thumbnail-item', pageDots: false, wrapAround: true, cellAlign: 'center', draggable: true, contain: true, prevNextButtons: true } this.flkty = new Flickity(this.refs.carousel, options) } componentWillUnmount () { this.flkty.destroy() } render () { return ( <div className='thumbnails-container' ref='carousel'> {this.props.videos.map((video) => <ThumbnailSlider id={video.id} thumbnail={video.thumbnail} title={video.title} key={video.id} handleClick={this.props.handleClick} /> )} <style jsx>{` .thumbnails-container { width: 800px; } `}</style> </div> ) } }
fix slider
sections/about/containers/SliderContainer.js
fix slider
<ide><path>ections/about/containers/SliderContainer.js <ide> const options = { <ide> cellCelector: '.thumbnail-item', <ide> pageDots: false, <del> wrapAround: true, <add> wrapAround: false, <ide> cellAlign: 'center', <ide> draggable: true, <ide> contain: true,
Java
apache-2.0
error: pathspec 'src/main/java/com/spatial4j/core/math/IntersectUtils.java' did not match any file(s) known to git
e26496ecfee081d5d5faca2ebca7adc8bd0d2ddc
1
varsis/spatial4j
package com.spatial4j.core.math; /** * Created with IntelliJ IDEA. * User: rfalford12 * Date: 3/4/14 * Time: 3:06 PM * To change this template use File | Settings | File Templates. */ public class IntersectUtils { }
src/main/java/com/spatial4j/core/math/IntersectUtils.java
Adding class for intersection utils - will code that and implement after polygon rep code
src/main/java/com/spatial4j/core/math/IntersectUtils.java
Adding class for intersection utils - will code that and implement after polygon rep code
<ide><path>rc/main/java/com/spatial4j/core/math/IntersectUtils.java <add>package com.spatial4j.core.math; <add> <add>/** <add> * Created with IntelliJ IDEA. <add> * User: rfalford12 <add> * Date: 3/4/14 <add> * Time: 3:06 PM <add> * To change this template use File | Settings | File Templates. <add> */ <add>public class IntersectUtils { <add>}
Java
mit
6592c79a426767e54090807a025efa4a04077dfd
0
TickleThePanda/fitbit-intraday-visualiser,TickleThePanda/health-vis
package uk.co.ticklethepanda.health.weight; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.knowm.xchart.XYChart; import org.knowm.xchart.XYChartBuilder; import org.knowm.xchart.XYSeries; import org.knowm.xchart.style.Styler; import org.knowm.xchart.style.markers.SeriesMarkers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Service; import uk.co.ticklethepanda.health.ChartConfig; import uk.co.ticklethepanda.utility.image.PngToByteArray; import java.awt.*; import java.awt.font.TextAttribute; import java.awt.image.BufferedImage; import java.io.IOException; import java.time.LocalDate; import java.time.ZoneId; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.stream.Collectors; import static uk.co.ticklethepanda.health.weight.AveragedWeight.calculateAverageWeighs; @Service public class WeightChartService { private static final Logger LOG = LogManager.getLogger(); private final WeightService weightService; private byte[] weightChart; private byte[] recentWeightChart; public WeightChartService(@Autowired WeightService weightService) { this.weightService = weightService; } public byte[] getWeightChart() throws IOException { if (weightChart == null) { cacheWeightChart(); } return weightChart; } public byte[] getRecentWeightChart() throws IOException { if (recentWeightChart == null) { cacheRecentWeightChart(); } return recentWeightChart; } public byte[] getChartBetweenDates(LocalDate start, LocalDate end) throws IOException { LOG.info("caching weight chart"); List<AveragedWeight> weights = calculateAverageWeighs(weightService.getAllWeightWithEntries()) .stream() .filter(w -> (start == null || w.getDate().isAfter(start)) && (end == null || w.getDate().isBefore(end))) .collect(Collectors.toList()); BufferedImage bufferedImage = createChart(weights); return 
PngToByteArray.convert(bufferedImage); } @Scheduled(fixedRate = 1000 * 60, initialDelay = 1) public void cacheRecentWeightChart() throws IOException { LOG.info("caching recent weight chart"); LocalDate aMonthAgo = LocalDate.now().minusDays(30); List<AveragedWeight> weights = calculateAverageWeighs(weightService.getAllWeightWithEntries()) .stream() .filter(w -> w.getDate().isAfter(aMonthAgo)) .collect(Collectors.toList()); BufferedImage bufferedImage = createChart(weights); this.recentWeightChart = PngToByteArray.convert(bufferedImage); } @Scheduled(fixedRate = 1000 * 60, initialDelay = 1) public void cacheWeightChart() throws IOException { LOG.info("caching weight chart"); BufferedImage bufferedImage = createChart( calculateAverageWeighs(weightService.getAllWeightWithEntries())); this.weightChart = PngToByteArray.convert(bufferedImage); } private BufferedImage createChart(List<AveragedWeight> weights) { final int chartWidth = 1000; final int chartHeight = 500; final int minMarkerSize = 4; final int markerSizeModifier = 8; List<Double> yData = weights.stream() .map(w -> w.getAverage()) .collect(Collectors.toList()); List<Date> xData = weights.stream() .map(w -> Date.from(w.getDate().atStartOfDay(ZoneId.systemDefault()).toInstant())) .collect(Collectors.toList()); XYChart chart = new XYChartBuilder() .width(chartWidth) .height(chartHeight) .xAxisTitle("Date") .yAxisTitle("Weight (kg)") .theme(Styler.ChartTheme.GGPlot2) .build(); Font font = chart.getStyler().getAxisTickLabelsFont(); chart.getStyler().setDefaultSeriesRenderStyle(XYSeries.XYSeriesRenderStyle.Scatter); chart.getStyler().setLegendVisible(false); chart.getStyler().setAxisTickLabelsFont(font.deriveFont( Collections.singletonMap( TextAttribute.WEIGHT, TextAttribute.WEIGHT_LIGHT))); chart.getStyler().setDatePattern("YYYY-MM-dd"); chart.getStyler().setChartPadding(ChartConfig.CHART_PADDING); int markerSize = chartWidth / xData.size() / markerSizeModifier; markerSize = Math.min(markerSize, 10); markerSize = 
Math.max(markerSize, 4); chart.getStyler().setMarkerSize(markerSize); XYSeries series = chart.addSeries("data", xData, yData); series.setMarker(SeriesMarkers.CIRCLE); BufferedImage bufferedImage = new BufferedImage(chart.getWidth(), chart.getHeight(), BufferedImage.TYPE_INT_RGB); Graphics2D graphics2D = bufferedImage.createGraphics(); chart.paint(graphics2D, chart.getWidth(), chart.getHeight()); return bufferedImage; } }
src/main/java/uk/co/ticklethepanda/health/weight/WeightChartService.java
package uk.co.ticklethepanda.health.weight; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.knowm.xchart.XYChart; import org.knowm.xchart.XYChartBuilder; import org.knowm.xchart.XYSeries; import org.knowm.xchart.style.Styler; import org.knowm.xchart.style.markers.SeriesMarkers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Service; import uk.co.ticklethepanda.health.ChartConfig; import uk.co.ticklethepanda.utility.image.PngToByteArray; import java.awt.*; import java.awt.font.TextAttribute; import java.awt.image.BufferedImage; import java.io.IOException; import java.time.LocalDate; import java.time.ZoneId; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.stream.Collectors; import static uk.co.ticklethepanda.health.weight.AveragedWeight.calculateAverageWeighs; @Service public class WeightChartService { private static final Logger LOG = LogManager.getLogger(); private final WeightService weightService; private byte[] weightChart; private byte[] recentWeightChart; public WeightChartService(@Autowired WeightService weightService) { this.weightService = weightService; } public byte[] getWeightChart() throws IOException { if (weightChart == null) { cacheWeightChart(); } return weightChart; } public byte[] getRecentWeightChart() throws IOException { if (recentWeightChart == null) { cacheRecentWeightChart(); } return recentWeightChart; } public byte[] getChartBetweenDates(LocalDate start, LocalDate end) throws IOException { LOG.info("caching weight chart"); List<AveragedWeight> weights = calculateAverageWeighs(weightService.getAllWeightWithEntries()) .stream() .filter(w -> (start == null || w.getDate().isAfter(start)) && (end == null || w.getDate().isBefore(end))) .collect(Collectors.toList()); BufferedImage bufferedImage = createChart(weights); return 
PngToByteArray.convert(bufferedImage); } @Scheduled(fixedRate = 1000 * 60, initialDelay = 1) public void cacheRecentWeightChart() throws IOException { LOG.info("caching recent weight chart"); LocalDate aMonthAgo = LocalDate.now().minusDays(30); List<AveragedWeight> weights = calculateAverageWeighs(weightService.getAllWeightWithEntries()) .stream() .filter(w -> w.getDate().isAfter(aMonthAgo)) .collect(Collectors.toList()); BufferedImage bufferedImage = createChart(weights); this.recentWeightChart = PngToByteArray.convert(bufferedImage); } @Scheduled(fixedRate = 1000 * 60, initialDelay = 1) public void cacheWeightChart() throws IOException { LOG.info("caching weight chart"); BufferedImage bufferedImage = createChart( calculateAverageWeighs(weightService.getAllWeightWithEntries())); this.weightChart = PngToByteArray.convert(bufferedImage); } private BufferedImage createChart(List<AveragedWeight> weights) { List<Double> yData = weights.stream() .map(w -> w.getAverage()) .collect(Collectors.toList()); List<Date> xData = weights.stream() .map(w -> Date.from(w.getDate().atStartOfDay(ZoneId.systemDefault()).toInstant())) .collect(Collectors.toList()); XYChart chart = new XYChartBuilder() .width(1000) .height(500) .xAxisTitle("Date") .yAxisTitle("Weight (kg)") .theme(Styler.ChartTheme.GGPlot2) .build(); Font font = chart.getStyler().getAxisTickLabelsFont(); chart.getStyler().setDefaultSeriesRenderStyle(XYSeries.XYSeriesRenderStyle.Scatter); chart.getStyler().setLegendVisible(false); chart.getStyler().setAxisTickLabelsFont(font.deriveFont( Collections.singletonMap( TextAttribute.WEIGHT, TextAttribute.WEIGHT_LIGHT))); chart.getStyler().setDatePattern("YYYY-MM-dd"); chart.getStyler().setChartPadding(ChartConfig.CHART_PADDING); chart.getStyler().setMarkerSize(4); XYSeries series = chart.addSeries("data", xData, yData); series.setMarker(SeriesMarkers.CIRCLE); BufferedImage bufferedImage = new BufferedImage(chart.getWidth(), chart.getHeight(), BufferedImage.TYPE_INT_RGB); 
Graphics2D graphics2D = bufferedImage.createGraphics(); chart.paint(graphics2D, chart.getWidth(), chart.getHeight()); return bufferedImage; } }
variable marker size for weight chart
src/main/java/uk/co/ticklethepanda/health/weight/WeightChartService.java
variable marker size for weight chart
<ide><path>rc/main/java/uk/co/ticklethepanda/health/weight/WeightChartService.java <ide> } <ide> <ide> private BufferedImage createChart(List<AveragedWeight> weights) { <add> final int chartWidth = 1000; <add> final int chartHeight = 500; <add> final int minMarkerSize = 4; <add> final int markerSizeModifier = 8; <add> <ide> List<Double> yData = weights.stream() <ide> .map(w -> w.getAverage()) <ide> .collect(Collectors.toList()); <ide> .collect(Collectors.toList()); <ide> <ide> XYChart chart = new XYChartBuilder() <del> .width(1000) <del> .height(500) <add> .width(chartWidth) <add> .height(chartHeight) <ide> .xAxisTitle("Date") <ide> .yAxisTitle("Weight (kg)") <ide> .theme(Styler.ChartTheme.GGPlot2) <ide> TextAttribute.WEIGHT, TextAttribute.WEIGHT_LIGHT))); <ide> chart.getStyler().setDatePattern("YYYY-MM-dd"); <ide> chart.getStyler().setChartPadding(ChartConfig.CHART_PADDING); <del> chart.getStyler().setMarkerSize(4); <add> <add> int markerSize = chartWidth / xData.size() / markerSizeModifier; <add> markerSize = Math.min(markerSize, 10); <add> markerSize = Math.max(markerSize, 4); <add> chart.getStyler().setMarkerSize(markerSize); <add> <ide> <ide> XYSeries series = chart.addSeries("data", xData, yData); <ide>
Java
apache-2.0
f08b9fac2c233910d2219567f47863b549fac527
0
huitseeker/deeplearning4j,kinbod/deeplearning4j,dmmiller612/deeplearning4j,dmmiller612/deeplearning4j,kinbod/deeplearning4j,huitseeker/deeplearning4j,dmmiller612/deeplearning4j,huitseeker/deeplearning4j,huitseeker/deeplearning4j,kinbod/deeplearning4j,dmmiller612/deeplearning4j,kinbod/deeplearning4j,dmmiller612/deeplearning4j,huitseeker/deeplearning4j,huitseeker/deeplearning4j,dmmiller612/deeplearning4j,huitseeker/deeplearning4j,kinbod/deeplearning4j,kinbod/deeplearning4j
/* * * * Copyright 2016 Skymind,Inc. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * */ package org.deeplearning4j.nn.modelimport.keras; import lombok.extern.slf4j.Slf4j; import org.bytedeco.javacpp.BytePointer; import org.bytedeco.javacpp.FloatPointer; import org.bytedeco.javacpp.Loader; import org.bytedeco.javacpp.hdf5; import org.deeplearning4j.nn.conf.ComputationGraphConfiguration; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.graph.ComputationGraph; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.shade.jackson.core.type.TypeReference; import org.nd4j.shade.jackson.databind.DeserializationFeature; import org.nd4j.shade.jackson.databind.ObjectMapper; import java.io.IOException; import java.io.InputStream; import java.lang.Exception; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.bytedeco.javacpp.hdf5.*; import static org.deeplearning4j.nn.modelimport.keras.KerasModel.*; /** * Reads stored Keras configurations and weights from one of two archives: * either (1) a single HDF5 file storing model and training JSON configurations * and weights or (2) separate text file storing model JSON configuration and * HDF5 file 
storing weights. * * @author [email protected] */ @Slf4j public class KerasModelImport { static { try { /* This is necessary for the call to the BytePointer constructor below. */ Loader.load(hdf5.class); } catch (Exception e) { e.printStackTrace(); } } private String modelJson; // model configuration JSON string private String trainingJson; // training configuration JSON string private String modelClassName; // Keras model class name private Map<String,Map<String,INDArray>> weights; // map from layer to parameter to weights /** * Load Keras (Functional API) Model saved using model.save_model(...). * * @param modelHdf5Stream InputStream containing HDF5 archive storing Keras Model * @return ComputationGraph * @throws IOException * @throws InvalidKerasConfigurationException * @throws UnsupportedKerasConfigurationException * @see ComputationGraph */ public static ComputationGraph importKerasModelAndWeights(InputStream modelHdf5Stream) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { throw new UnsupportedOperationException(""); // KerasModel kerasModel = new KerasModel.ModelBuilder() // .modelHdf5InputStream(modelHdf5Stream) // .enforceTrainingConfig(false) // .buildModel(); // ComputationGraph model = kerasModel.getComputationGraph(); // return model; } /** * Load Keras Sequential model saved using model.save_model(...). 
* * @param modelHdf5Stream InputStream containing HDF5 archive storing Keras Sequential model * @return ComputationGraph * @throws IOException * @throws InvalidKerasConfigurationException * @throws UnsupportedKerasConfigurationException * @see ComputationGraph */ public static MultiLayerNetwork importKerasSequentialModelAndWeights(InputStream modelHdf5Stream) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { throw new UnsupportedOperationException(""); // KerasSequentialModel kerasModel = new KerasModel.ModelBuilder() // .modelHdf5InputStream(modelHdf5Stream) // .enforceTrainingConfig(false) // .buildSequential(); // MultiLayerNetwork model = kerasModel.getMultiLayerNetwork(); // return model; } /** * Load Keras (Functional API) Model saved using model.save_model(...). * * @param modelHdf5Filename path to HDF5 archive storing Keras Model * @return ComputationGraph * @throws IOException * @throws InvalidKerasConfigurationException * @throws UnsupportedKerasConfigurationException * @see ComputationGraph */ public static ComputationGraph importKerasModelAndWeights(String modelHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasModel kerasModel = new KerasModel.ModelBuilder() .modelHdf5Filename(modelHdf5Filename) .enforceTrainingConfig(false) .buildModel(); ComputationGraph model = kerasModel.getComputationGraph(); return model; } /** * Load Keras Sequential model saved using model.save_model(...). 
* * @param modelHdf5Filename path to HDF5 archive storing Keras Sequential model * @return MultiLayerNetwork * @throws IOException * @see MultiLayerNetwork */ public static MultiLayerNetwork importKerasSequentialModelAndWeights(String modelHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasSequentialModel kerasModel = new KerasSequentialModel.ModelBuilder() .modelHdf5Filename(modelHdf5Filename) .enforceTrainingConfig(false) .buildSequential(); MultiLayerNetwork model = kerasModel.getMultiLayerNetwork(); return model; } /** * Load Keras (Functional API) Model for which the configuration and weights were * saved separately using calls to model.to_json() and model.save_weights(...). * * @param modelJsonFilename path to JSON file storing Keras Model configuration * @param weightsHdf5Filename path to HDF5 archive storing Keras model weights * @return ComputationGraph * @throws IOException * @see ComputationGraph */ public static ComputationGraph importKerasModelAndWeights(String modelJsonFilename, String weightsHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasModel kerasModel = new KerasModel.ModelBuilder() .modelJsonFilename(modelJsonFilename) .weightsHdf5Filename(weightsHdf5Filename) .enforceTrainingConfig(false) .buildModel(); ComputationGraph model = kerasModel.getComputationGraph(); return model; } /** * Load Keras Sequential model for which the configuration and weights were * saved separately using calls to model.to_json() and model.save_weights(...). 
* * @param modelJsonFilename path to JSON file storing Keras Sequential model configuration * @param weightsHdf5Filename path to HDF5 archive storing Keras model weights * @return MultiLayerNetwork * @throws IOException * @see MultiLayerNetwork */ public static MultiLayerNetwork importKerasSequentialModelAndWeights(String modelJsonFilename, String weightsHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasSequentialModel kerasModel = new KerasSequentialModel.ModelBuilder() .modelJsonFilename(modelJsonFilename) .weightsHdf5Filename(weightsHdf5Filename) .enforceTrainingConfig(false) .buildSequential(); MultiLayerNetwork model = kerasModel.getMultiLayerNetwork(); return model; } /** * Load Keras (Functional API) Model for which the configuration was saved * separately using calls to model.to_json() and model.save_weights(...). * * @param modelJsonFilename path to JSON file storing Keras Model configuration * @return ComputationGraph * @throws IOException * @see ComputationGraph */ public static ComputationGraphConfiguration importKerasModelConfiguration(String modelJsonFilename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasModel kerasModel = new KerasModel.ModelBuilder() .modelJsonFilename(modelJsonFilename) .enforceTrainingConfig(false) .buildModel(); return kerasModel.getComputationGraphConfiguration(); } /** * Load Keras Sequential model for which the configuration was saved * separately using calls to model.to_json() and model.save_weights(...). 
* * @param modelJsonFilename path to JSON file storing Keras Sequential model configuration * @return MultiLayerNetwork * @throws IOException * @see MultiLayerNetwork */ public static MultiLayerConfiguration importKerasSequentialConfiguration(String modelJsonFilename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasSequentialModel kerasModel = new KerasSequentialModel.ModelBuilder() .modelJsonFilename(modelJsonFilename) .enforceTrainingConfig(false) .buildSequential(); return kerasModel.getMultiLayerConfiguration(); } /** * Constructor from HDF5 model archive stored in InputStream. * * @param modelHdf5Stream InputStream containing HDF5 archive of Keras model * @throws IOException * * TODO: Currently, this constructor does not work. It does not appear to be * possible to open an HDF5 archive from raw bytes. */ public KerasModelImport(InputStream modelHdf5Stream) throws UnsupportedOperationException, IOException, UnsupportedKerasConfigurationException, InvalidKerasConfigurationException { log.warn("Importing a Keras model from an InputStream pointing to contents of an HDF5 archive currently not supported."); throw new UnsupportedOperationException("Importing a Keras model from an InputStream currently not supported " + "because it is not possible to load an HDF5 file from a memory buffer using the HDF5 C++ API. 
" + "See: http://stackoverflow.com/questions/18449972/how-can-i-open-hdf5-file-from-memory-buffer-using-hdf5-c-api"); /* One very hacky workaround would be to write the InputStream out to * a temporary file and then use the "from filename" constructor to * import from that file, as follows: * * File tempFile = File.createTempFile("temporary_model_archive",".h5"); * tempFile.deleteOnExit(); * tempFile.canWrite(); * FileOutputStream tempOutputStream = new FileOutputStream(tempFile); * IOUtils.copy(modelHdf5Stream, tempOutputStream); * tempOutputStream.close(); * String tempFilename = tempFile.getAbsolutePath(); * super(tempFilename); */ } /** * Constructor from HDF5 model archive. * * @param modelHdf5Filename path to HDF5 archive storing Keras model * @throws IOException */ public KerasModelImport(String modelHdf5Filename) throws IOException, UnsupportedKerasConfigurationException, InvalidKerasConfigurationException { /* Open HDF5 archive model file. */ hdf5.H5File file = new hdf5.H5File(modelHdf5Filename, H5F_ACC_RDONLY); /* Read model and training configurations from top-level attributes. */ this.modelJson = readJsonStringFromHdf5Attribute(file, "model_config"); this.modelClassName = getModelClassName(this.modelJson); this.trainingJson = readJsonStringFromHdf5Attribute(file, "training_config"); /* Read weights from "/weights" group. */ this.weights = readWeightsFromHdf5(file, "/model_weights"); file.close(); } /** * Constructor that takes filenames for JSON model configuration and for * HDF5 weights archive. * * @param modelJsonFilename path to JSON file storing Keras Sequential model configuration * @param weightsHdf5Filename path to HDF5 archive storing Keras model weights * @throws IOException */ public KerasModelImport(String modelJsonFilename, String weightsHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { /* Read model configuration from JSON file. 
*/ this.modelJson = new String(Files.readAllBytes(Paths.get(modelJsonFilename))); this.modelClassName = getModelClassName(this.modelJson); /* Open HDF5 archive weights file. */ hdf5.H5File file = new hdf5.H5File(weightsHdf5Filename, H5F_ACC_RDONLY); /* Read weights from root ("/") group. */ this.weights = readWeightsFromHdf5(file, "/"); file.close(); } /** * Get model configuration JSON. * * @return model configuration JSON as string */ public String getModelJson() { return this.modelJson; } /** * Get training configuration JSON. * * @return training configuration JSON as string */ public String getTrainingJson() { return this.trainingJson; } /** * Get model class name (Model, Sequential, etc.). * * @return model class name as String */ public String getModelClassName() { return this.modelClassName; } /** * Get model weights stored as map from layer to parameter to INDArray. * * @return model weights as map from layer name to param name to INDArray */ public Map<String, Map<String, INDArray>> getWeights() { return this.weights; } /** * Read Keras model weights from specified HDF5 file and Group into a map * from layer to parameter to weights (INDArray). 
* * @param file open HDF5 archive file * @param weightsGroupName name of root HDF5 Group storing all Keras weights for single model * @return nested Map from layer names to parameter names to INDArrays */ private static Map<String,Map<String,INDArray>> readWeightsFromHdf5(hdf5.H5File file, String weightsGroupName) throws UnsupportedKerasConfigurationException, InvalidKerasConfigurationException { hdf5.Group weightsGroup = file.asCommonFG().openGroup(weightsGroupName); Map<String,Map<String,INDArray>> weightsMap = new HashMap<String,Map<String,INDArray>>(); List<hdf5.Group> groups = new ArrayList<hdf5.Group>(); groups.add(weightsGroup); while (!groups.isEmpty()) { hdf5.Group g = groups.remove(0); for (int i = 0; i < g.asCommonFG().getNumObjs(); i++) { BytePointer objPtr = g.asCommonFG().getObjnameByIdx(i); String objName = objPtr.getString(); int objType = g.asCommonFG().childObjType(objPtr); switch (objType) { case H5O_TYPE_DATASET: /* Keras parameter names are typically formatted as [layer name]_[layer no]_[parameter]. * For example, the weight matrix in the first Dense layer will be named "dense_1_W." */ Pattern paramNamePattern = Pattern.compile("_([^_]+?)$"); Matcher paramNameMatcher = paramNamePattern.matcher(objName); if (!paramNameMatcher.find()) throw new InvalidKerasConfigurationException("Unable to parse layer/parameter name " + objName + " for stored weights."); String paramName = paramNameMatcher.group(1); String layerName = paramNameMatcher.replaceFirst(""); /* TensorFlow backend often appends ":" followed by one * or more digits to parameter names, but this is not * reflected in the model config. We must strip it off. 
*/ Pattern p = Pattern.compile(":\\d+?$"); Matcher m = p.matcher(paramName); if (m.find()) paramName = m.replaceFirst(""); hdf5.DataSet d = g.asCommonFG().openDataSet(objPtr); hdf5.DataSpace space = d.getSpace(); int nbDims = (int)space.getSimpleExtentNdims(); long[] dims = new long[nbDims]; space.getSimpleExtentDims(dims); float[] weightBuffer = null; FloatPointer fp = null; int j = 0; INDArray weights = null; switch (nbDims) { case 4: /* 2D Convolution weights */ weightBuffer = new float[(int)(dims[0]*dims[1]*dims[2]*dims[3])]; fp = new FloatPointer(weightBuffer); d.read(fp, new hdf5.DataType(hdf5.PredType.NATIVE_FLOAT())); fp.get(weightBuffer); weights = Nd4j.create((int)dims[0], (int)dims[1], (int)dims[2], (int)dims[3]); j = 0; for (int i1 = 0; i1 < dims[0]; i1++) for (int i2 = 0; i2 < dims[1]; i2++) for (int i3 = 0; i3 < dims[2]; i3++) for (int i4 = 0; i4 < dims[3]; i4++) weights.putScalar(i1, i2, i3, i4, weightBuffer[j++]); break; case 2: /* Dense and Recurrent weights */ weightBuffer = new float[(int)(dims[0]*dims[1])]; fp = new FloatPointer(weightBuffer); d.read(fp, new hdf5.DataType(hdf5.PredType.NATIVE_FLOAT())); fp.get(weightBuffer); weights = Nd4j.create((int)dims[0], (int)dims[1]); j = 0; for (int i1 = 0; i1 < dims[0]; i1++) for (int i2 = 0; i2 < dims[1]; i2++) weights.putScalar(i1, i2, weightBuffer[j++]); break; case 1: /* Bias */ weightBuffer = new float[(int)dims[0]]; fp = new FloatPointer(weightBuffer); d.read(fp, new hdf5.DataType(hdf5.PredType.NATIVE_FLOAT())); fp.get(weightBuffer); weights = Nd4j.create((int)dims[0]); j = 0; for (int i1 = 0; i1 < dims[0]; i1++) weights.putScalar(i1, weightBuffer[j++]); break; default: throw new UnsupportedKerasConfigurationException("Cannot import weights with rank " + nbDims); } if (!weightsMap.containsKey(layerName)) weightsMap.put(layerName, new HashMap<String, INDArray>()); weightsMap.get(layerName).put(paramName, weights); d.close(); break; case H5O_TYPE_GROUP: default: 
groups.add(g.asCommonFG().openGroup(objPtr)); break; } } g.close(); } file.close(); return weightsMap; } /** * Read contents of top-level string attribute from HDF5 File archive. * * @param file HDF5 File * @param attribute name of attribute * @return contents of attribute as String */ private static String readJsonStringFromHdf5Attribute(hdf5.H5File file, String attribute) throws InvalidKerasConfigurationException { hdf5.Attribute attr = file.openAttribute(attribute); hdf5.VarLenType vl = attr.getVarLenType(); int bufferSizeMult = 1; String jsonString = null; /* TODO: find a less hacky way to do this. * Reading variable length strings (from attributes) is a giant * pain. There does not appear to be any way to determine the * length of the string in advance, so we use a hack: choose a * buffer size and read the config. If Jackson fails to parse * it, then we must not have read the entire config. Increase * buffer and repeat. */ while (true) { byte[] attrBuffer = new byte[bufferSizeMult * 2000]; BytePointer attrPointer = new BytePointer(attrBuffer); attr.read(vl, attrPointer); attrPointer.get(attrBuffer); jsonString = new String(attrBuffer); ObjectMapper mapper = new ObjectMapper(); mapper.enable(DeserializationFeature.FAIL_ON_READING_DUP_TREE_KEY); try { mapper.readTree(jsonString); break; } catch (IOException e) {} bufferSizeMult++; if (bufferSizeMult > 100) { throw new InvalidKerasConfigurationException("Could not read abnormally long Keras config. Please file an issue!"); } } return jsonString; } /** * Convenience function for parsing JSON strings. 
* * @param modelJson string containing valid JSON * @return nested Map with arbitrary depth * @throws IOException */ private static String getModelClassName(String modelJson) throws IOException, InvalidKerasConfigurationException { ObjectMapper mapper = new ObjectMapper(); TypeReference<HashMap<String,Object>> typeRef = new TypeReference<HashMap<String,Object>>() {}; Map<String,Object> modelConfig = mapper.readValue(modelJson, typeRef); if (!modelConfig.containsKey(MODEL_FIELD_CLASS_NAME)) throw new InvalidKerasConfigurationException("Unable to determine Keras model class name."); return (String)modelConfig.get(MODEL_FIELD_CLASS_NAME); } }
deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/KerasModelImport.java
/* * * * Copyright 2016 Skymind,Inc. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * */ package org.deeplearning4j.nn.modelimport.keras; import lombok.extern.slf4j.Slf4j; import org.bytedeco.javacpp.BytePointer; import org.bytedeco.javacpp.FloatPointer; import org.bytedeco.javacpp.Loader; import org.bytedeco.javacpp.hdf5; import org.deeplearning4j.nn.conf.ComputationGraphConfiguration; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.graph.ComputationGraph; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.shade.jackson.core.type.TypeReference; import org.nd4j.shade.jackson.databind.DeserializationFeature; import org.nd4j.shade.jackson.databind.ObjectMapper; import java.io.IOException; import java.io.InputStream; import java.lang.Exception; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.bytedeco.javacpp.hdf5.*; import static org.deeplearning4j.nn.modelimport.keras.KerasModel.*; /** * Reads stored Keras configurations and weights from one of two archives: * either (1) a single HDF5 file storing model and training JSON configurations * and weights or (2) separate text file storing model JSON configuration and * HDF5 file 
storing weights. * * @author [email protected] */ @Slf4j public class KerasModelImport { static { try { /* This is necessary for the call to the BytePointer constructor below. */ Loader.load(hdf5.class); } catch (Exception e) { e.printStackTrace(); } } private String modelJson; // model configuration JSON string private String trainingJson; // training configuration JSON string private String modelClassName; // Keras model class name private Map<String,Map<String,INDArray>> weights; // map from layer to parameter to weights /** * Load Keras (Functional API) Model saved using model.save_model(...). * * @param modelHdf5Stream InputStream containing HDF5 archive storing Keras Model * @return ComputationGraph * @throws IOException * @throws InvalidKerasConfigurationException * @throws UnsupportedKerasConfigurationException * @see ComputationGraph */ public static ComputationGraph importKerasModelAndWeights(InputStream modelHdf5Stream) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasModelImport archive = new KerasModelImport(modelHdf5Stream); if (!archive.getModelClassName().equals(MODEL_CLASS_NAME_MODEL)) throw new InvalidKerasConfigurationException("Expected Keras model class name Model (found " + archive.getModelClassName() + ")"); KerasModel kerasModel = new KerasModel.ModelBuilder() .modelJson(archive.getModelJson()) .trainingJson(archive.getTrainingJson()) // .weights(archive.getWeights()) .enforceTrainingConfig(false) .buildModel(); ComputationGraph model = kerasModel.getComputationGraph(); return model; } /** * Load Keras Sequential model saved using model.save_model(...). 
* * @param modelHdf5Stream InputStream containing HDF5 archive storing Keras Sequential model * @return ComputationGraph * @throws IOException * @throws InvalidKerasConfigurationException * @throws UnsupportedKerasConfigurationException * @see ComputationGraph */ public static MultiLayerNetwork importKerasSequentialModelAndWeights(InputStream modelHdf5Stream) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasModelImport archive = new KerasModelImport(modelHdf5Stream); if (!archive.getModelClassName().equals(MODEL_CLASS_NAME_MODEL)) throw new InvalidKerasConfigurationException("Expected Keras model class name Model (found " + archive.getModelClassName() + ")"); KerasSequentialModel kerasModel = new KerasModel.ModelBuilder() .modelJson(archive.getModelJson()) .trainingJson(archive.getTrainingJson()) // .weights(archive.getWeights()) .enforceTrainingConfig(false) .buildSequential(); MultiLayerNetwork model = kerasModel.getMultiLayerNetwork(); return model; } /** * Load Keras (Functional API) Model saved using model.save_model(...). 
* * @param modelHdf5Filename path to HDF5 archive storing Keras Model * @return ComputationGraph * @throws IOException * @throws InvalidKerasConfigurationException * @throws UnsupportedKerasConfigurationException * @see ComputationGraph */ public static ComputationGraph importKerasModelAndWeights(String modelHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasModelImport archive = new KerasModelImport(modelHdf5Filename); if (!archive.getModelClassName().equals(MODEL_CLASS_NAME_MODEL)) throw new InvalidKerasConfigurationException("Expected Keras model class name Model (found " + archive.getModelClassName() + ")"); KerasModel kerasModel = new KerasModel.ModelBuilder() .modelJson(archive.getModelJson()) .trainingJson(archive.getTrainingJson()) // .weights(archive.getWeights()) .enforceTrainingConfig(false) .buildModel(); ComputationGraph model = kerasModel.getComputationGraph(); return model; } /** * Load Keras Sequential model saved using model.save_model(...). * * @param modelHdf5Filename path to HDF5 archive storing Keras Sequential model * @return MultiLayerNetwork * @throws IOException * @see MultiLayerNetwork */ public static MultiLayerNetwork importKerasSequentialModelAndWeights(String modelHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasSequentialModel kerasModel = new KerasSequentialModel.ModelBuilder() .modelHdf5Filename(modelHdf5Filename) .enforceTrainingConfig(false) .buildSequential(); MultiLayerNetwork model = kerasModel.getMultiLayerNetwork(); return model; } /** * Load Keras (Functional API) Model for which the configuration and weights were * saved separately using calls to model.to_json() and model.save_weights(...). 
* * @param modelJsonFilename path to JSON file storing Keras Model configuration * @param weightsHdf5Filename path to HDF5 archive storing Keras model weights * @return ComputationGraph * @throws IOException * @see ComputationGraph */ public static ComputationGraph importKerasModelAndWeights(String modelJsonFilename, String weightsHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasModelImport archive = new KerasModelImport(modelJsonFilename, weightsHdf5Filename); if (!archive.getModelClassName().equals(MODEL_CLASS_NAME_MODEL)) throw new InvalidKerasConfigurationException("Expected Keras model class name Model (found " + archive.getModelClassName() + ")"); KerasModel kerasModel = new KerasModel.ModelBuilder() .modelJson(archive.getModelJson()) // .weights(archive.getWeights()) .enforceTrainingConfig(false) .buildModel(); ComputationGraph model = kerasModel.getComputationGraph(); return model; } /** * Load Keras Sequential model for which the configuration and weights were * saved separately using calls to model.to_json() and model.save_weights(...). 
* * @param modelJsonFilename path to JSON file storing Keras Sequential model configuration * @param weightsHdf5Filename path to HDF5 archive storing Keras model weights * @return MultiLayerNetwork * @throws IOException * @see MultiLayerNetwork */ public static MultiLayerNetwork importKerasSequentialModelAndWeights(String modelJsonFilename, String weightsHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { KerasSequentialModel kerasModel = new KerasSequentialModel.ModelBuilder() .modelJsonFilename(modelJsonFilename) .weightsHdf5Filename(weightsHdf5Filename) .enforceTrainingConfig(false) .buildSequential(); MultiLayerNetwork model = kerasModel.getMultiLayerNetwork(); return model; } /** * Load Keras (Functional API) Model for which the configuration was saved * separately using calls to model.to_json() and model.save_weights(...). * * @param modelJsonFilename path to JSON file storing Keras Model configuration * @return ComputationGraph * @throws IOException * @see ComputationGraph */ public static ComputationGraphConfiguration importKerasModelConfiguration(String modelJsonFilename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { String modelJson = new String(Files.readAllBytes(Paths.get(modelJsonFilename))); KerasModel kerasModel = new KerasModel.ModelBuilder() .modelJson(modelJson) .enforceTrainingConfig(false) .buildModel(); return kerasModel.getComputationGraphConfiguration(); } /** * Load Keras Sequential model for which the configuration was saved * separately using calls to model.to_json() and model.save_weights(...). 
* * @param modelJsonFilename path to JSON file storing Keras Sequential model configuration * @return MultiLayerNetwork * @throws IOException * @see MultiLayerNetwork */ public static MultiLayerConfiguration importKerasSequentialConfiguration(String modelJsonFilename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { String modelJson = new String(Files.readAllBytes(Paths.get(modelJsonFilename))); KerasSequentialModel kerasModel = new KerasSequentialModel.ModelBuilder() .modelJson(modelJson) .enforceTrainingConfig(false) .buildSequential(); return kerasModel.getMultiLayerConfiguration(); } /** * Constructor from HDF5 model archive stored in InputStream. * * @param modelHdf5Stream InputStream containing HDF5 archive of Keras model * @throws IOException * * TODO: Currently, this constructor does not work. It does not appear to be * possible to open an HDF5 archive from raw bytes. */ public KerasModelImport(InputStream modelHdf5Stream) throws UnsupportedOperationException, IOException, UnsupportedKerasConfigurationException, InvalidKerasConfigurationException { log.warn("Importing a Keras model from an InputStream pointing to contents of an HDF5 archive currently not supported."); throw new UnsupportedOperationException("Importing a Keras model from an InputStream currently not supported " + "because it is not possible to load an HDF5 file from a memory buffer using the HDF5 C++ API. 
" + "See: http://stackoverflow.com/questions/18449972/how-can-i-open-hdf5-file-from-memory-buffer-using-hdf5-c-api"); /* One very hacky workaround would be to write the InputStream out to * a temporary file and then use the "from filename" constructor to * import from that file, as follows: * * File tempFile = File.createTempFile("temporary_model_archive",".h5"); * tempFile.deleteOnExit(); * tempFile.canWrite(); * FileOutputStream tempOutputStream = new FileOutputStream(tempFile); * IOUtils.copy(modelHdf5Stream, tempOutputStream); * tempOutputStream.close(); * String tempFilename = tempFile.getAbsolutePath(); * super(tempFilename); */ } /** * Constructor from HDF5 model archive. * * @param modelHdf5Filename path to HDF5 archive storing Keras model * @throws IOException */ public KerasModelImport(String modelHdf5Filename) throws IOException, UnsupportedKerasConfigurationException, InvalidKerasConfigurationException { /* Open HDF5 archive model file. */ hdf5.H5File file = new hdf5.H5File(modelHdf5Filename, H5F_ACC_RDONLY); /* Read model and training configurations from top-level attributes. */ this.modelJson = readJsonStringFromHdf5Attribute(file, "model_config"); this.modelClassName = getModelClassName(this.modelJson); this.trainingJson = readJsonStringFromHdf5Attribute(file, "training_config"); /* Read weights from "/weights" group. */ this.weights = readWeightsFromHdf5(file, "/model_weights"); file.close(); } /** * Constructor that takes filenames for JSON model configuration and for * HDF5 weights archive. * * @param modelJsonFilename path to JSON file storing Keras Sequential model configuration * @param weightsHdf5Filename path to HDF5 archive storing Keras model weights * @throws IOException */ public KerasModelImport(String modelJsonFilename, String weightsHdf5Filename) throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { /* Read model configuration from JSON file. 
*/ this.modelJson = new String(Files.readAllBytes(Paths.get(modelJsonFilename))); this.modelClassName = getModelClassName(this.modelJson); /* Open HDF5 archive weights file. */ hdf5.H5File file = new hdf5.H5File(weightsHdf5Filename, H5F_ACC_RDONLY); /* Read weights from root ("/") group. */ this.weights = readWeightsFromHdf5(file, "/"); file.close(); } /** * Get model configuration JSON. * * @return model configuration JSON as string */ public String getModelJson() { return this.modelJson; } /** * Get training configuration JSON. * * @return training configuration JSON as string */ public String getTrainingJson() { return this.trainingJson; } /** * Get model class name (Model, Sequential, etc.). * * @return model class name as String */ public String getModelClassName() { return this.modelClassName; } /** * Get model weights stored as map from layer to parameter to INDArray. * * @return model weights as map from layer name to param name to INDArray */ public Map<String, Map<String, INDArray>> getWeights() { return this.weights; } /** * Read Keras model weights from specified HDF5 file and Group into a map * from layer to parameter to weights (INDArray). 
* * @param file open HDF5 archive file * @param weightsGroupName name of root HDF5 Group storing all Keras weights for single model * @return nested Map from layer names to parameter names to INDArrays */ private static Map<String,Map<String,INDArray>> readWeightsFromHdf5(hdf5.H5File file, String weightsGroupName) throws UnsupportedKerasConfigurationException, InvalidKerasConfigurationException { hdf5.Group weightsGroup = file.asCommonFG().openGroup(weightsGroupName); Map<String,Map<String,INDArray>> weightsMap = new HashMap<String,Map<String,INDArray>>(); List<hdf5.Group> groups = new ArrayList<hdf5.Group>(); groups.add(weightsGroup); while (!groups.isEmpty()) { hdf5.Group g = groups.remove(0); for (int i = 0; i < g.asCommonFG().getNumObjs(); i++) { BytePointer objPtr = g.asCommonFG().getObjnameByIdx(i); String objName = objPtr.getString(); int objType = g.asCommonFG().childObjType(objPtr); switch (objType) { case H5O_TYPE_DATASET: /* Keras parameter names are typically formatted as [layer name]_[layer no]_[parameter]. * For example, the weight matrix in the first Dense layer will be named "dense_1_W." */ Pattern paramNamePattern = Pattern.compile("_([^_]+?)$"); Matcher paramNameMatcher = paramNamePattern.matcher(objName); if (!paramNameMatcher.find()) throw new InvalidKerasConfigurationException("Unable to parse layer/parameter name " + objName + " for stored weights."); String paramName = paramNameMatcher.group(1); String layerName = paramNameMatcher.replaceFirst(""); /* TensorFlow backend often appends ":" followed by one * or more digits to parameter names, but this is not * reflected in the model config. We must strip it off. 
*/ Pattern p = Pattern.compile(":\\d+?$"); Matcher m = p.matcher(paramName); if (m.find()) paramName = m.replaceFirst(""); hdf5.DataSet d = g.asCommonFG().openDataSet(objPtr); hdf5.DataSpace space = d.getSpace(); int nbDims = (int)space.getSimpleExtentNdims(); long[] dims = new long[nbDims]; space.getSimpleExtentDims(dims); float[] weightBuffer = null; FloatPointer fp = null; int j = 0; INDArray weights = null; switch (nbDims) { case 4: /* 2D Convolution weights */ weightBuffer = new float[(int)(dims[0]*dims[1]*dims[2]*dims[3])]; fp = new FloatPointer(weightBuffer); d.read(fp, new hdf5.DataType(hdf5.PredType.NATIVE_FLOAT())); fp.get(weightBuffer); weights = Nd4j.create((int)dims[0], (int)dims[1], (int)dims[2], (int)dims[3]); j = 0; for (int i1 = 0; i1 < dims[0]; i1++) for (int i2 = 0; i2 < dims[1]; i2++) for (int i3 = 0; i3 < dims[2]; i3++) for (int i4 = 0; i4 < dims[3]; i4++) weights.putScalar(i1, i2, i3, i4, weightBuffer[j++]); break; case 2: /* Dense and Recurrent weights */ weightBuffer = new float[(int)(dims[0]*dims[1])]; fp = new FloatPointer(weightBuffer); d.read(fp, new hdf5.DataType(hdf5.PredType.NATIVE_FLOAT())); fp.get(weightBuffer); weights = Nd4j.create((int)dims[0], (int)dims[1]); j = 0; for (int i1 = 0; i1 < dims[0]; i1++) for (int i2 = 0; i2 < dims[1]; i2++) weights.putScalar(i1, i2, weightBuffer[j++]); break; case 1: /* Bias */ weightBuffer = new float[(int)dims[0]]; fp = new FloatPointer(weightBuffer); d.read(fp, new hdf5.DataType(hdf5.PredType.NATIVE_FLOAT())); fp.get(weightBuffer); weights = Nd4j.create((int)dims[0]); j = 0; for (int i1 = 0; i1 < dims[0]; i1++) weights.putScalar(i1, weightBuffer[j++]); break; default: throw new UnsupportedKerasConfigurationException("Cannot import weights with rank " + nbDims); } if (!weightsMap.containsKey(layerName)) weightsMap.put(layerName, new HashMap<String, INDArray>()); weightsMap.get(layerName).put(paramName, weights); d.close(); break; case H5O_TYPE_GROUP: default: 
groups.add(g.asCommonFG().openGroup(objPtr)); break; } } g.close(); } file.close(); return weightsMap; } /** * Read contents of top-level string attribute from HDF5 File archive. * * @param file HDF5 File * @param attribute name of attribute * @return contents of attribute as String */ private static String readJsonStringFromHdf5Attribute(hdf5.H5File file, String attribute) throws InvalidKerasConfigurationException { hdf5.Attribute attr = file.openAttribute(attribute); hdf5.VarLenType vl = attr.getVarLenType(); int bufferSizeMult = 1; String jsonString = null; /* TODO: find a less hacky way to do this. * Reading variable length strings (from attributes) is a giant * pain. There does not appear to be any way to determine the * length of the string in advance, so we use a hack: choose a * buffer size and read the config. If Jackson fails to parse * it, then we must not have read the entire config. Increase * buffer and repeat. */ while (true) { byte[] attrBuffer = new byte[bufferSizeMult * 2000]; BytePointer attrPointer = new BytePointer(attrBuffer); attr.read(vl, attrPointer); attrPointer.get(attrBuffer); jsonString = new String(attrBuffer); ObjectMapper mapper = new ObjectMapper(); mapper.enable(DeserializationFeature.FAIL_ON_READING_DUP_TREE_KEY); try { mapper.readTree(jsonString); break; } catch (IOException e) {} bufferSizeMult++; if (bufferSizeMult > 100) { throw new InvalidKerasConfigurationException("Could not read abnormally long Keras config. Please file an issue!"); } } return jsonString; } /** * Convenience function for parsing JSON strings. 
* * @param modelJson string containing valid JSON * @return nested Map with arbitrary depth * @throws IOException */ private static String getModelClassName(String modelJson) throws IOException, InvalidKerasConfigurationException { ObjectMapper mapper = new ObjectMapper(); TypeReference<HashMap<String,Object>> typeRef = new TypeReference<HashMap<String,Object>>() {}; Map<String,Object> modelConfig = mapper.readValue(modelJson, typeRef); if (!modelConfig.containsKey(MODEL_FIELD_CLASS_NAME)) throw new InvalidKerasConfigurationException("Unable to determine Keras model class name."); return (String)modelConfig.get(MODEL_FIELD_CLASS_NAME); } }
WIP: fixed static methods in KerasModelImport. Former-commit-id: 371d81a4e9acdd5ba3dc2be58e77115aedd2a614
deeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/KerasModelImport.java
WIP: fixed static methods in KerasModelImport.
<ide><path>eeplearning4j-modelimport/src/main/java/org/deeplearning4j/nn/modelimport/keras/KerasModelImport.java <ide> */ <ide> public static ComputationGraph importKerasModelAndWeights(InputStream modelHdf5Stream) <ide> throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { <del> KerasModelImport archive = new KerasModelImport(modelHdf5Stream); <del> if (!archive.getModelClassName().equals(MODEL_CLASS_NAME_MODEL)) <del> throw new InvalidKerasConfigurationException("Expected Keras model class name Model (found " + archive.getModelClassName() + ")"); <del> KerasModel kerasModel = new KerasModel.ModelBuilder() <del> .modelJson(archive.getModelJson()) <del> .trainingJson(archive.getTrainingJson()) <del>// .weights(archive.getWeights()) <del> .enforceTrainingConfig(false) <del> .buildModel(); <del> ComputationGraph model = kerasModel.getComputationGraph(); <del> return model; <add> throw new UnsupportedOperationException(""); <add>// KerasModel kerasModel = new KerasModel.ModelBuilder() <add>// .modelHdf5InputStream(modelHdf5Stream) <add>// .enforceTrainingConfig(false) <add>// .buildModel(); <add>// ComputationGraph model = kerasModel.getComputationGraph(); <add>// return model; <ide> } <ide> <ide> /** <ide> */ <ide> public static MultiLayerNetwork importKerasSequentialModelAndWeights(InputStream modelHdf5Stream) <ide> throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { <del> KerasModelImport archive = new KerasModelImport(modelHdf5Stream); <del> if (!archive.getModelClassName().equals(MODEL_CLASS_NAME_MODEL)) <del> throw new InvalidKerasConfigurationException("Expected Keras model class name Model (found " + archive.getModelClassName() + ")"); <del> KerasSequentialModel kerasModel = new KerasModel.ModelBuilder() <del> .modelJson(archive.getModelJson()) <del> .trainingJson(archive.getTrainingJson()) <del>// .weights(archive.getWeights()) <del> .enforceTrainingConfig(false) <del> 
.buildSequential(); <del> MultiLayerNetwork model = kerasModel.getMultiLayerNetwork(); <del> return model; <add> throw new UnsupportedOperationException(""); <add>// KerasSequentialModel kerasModel = new KerasModel.ModelBuilder() <add>// .modelHdf5InputStream(modelHdf5Stream) <add>// .enforceTrainingConfig(false) <add>// .buildSequential(); <add>// MultiLayerNetwork model = kerasModel.getMultiLayerNetwork(); <add>// return model; <ide> } <ide> <ide> /** <ide> */ <ide> public static ComputationGraph importKerasModelAndWeights(String modelHdf5Filename) <ide> throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { <del> KerasModelImport archive = new KerasModelImport(modelHdf5Filename); <del> if (!archive.getModelClassName().equals(MODEL_CLASS_NAME_MODEL)) <del> throw new InvalidKerasConfigurationException("Expected Keras model class name Model (found " + archive.getModelClassName() + ")"); <ide> KerasModel kerasModel = new KerasModel.ModelBuilder() <del> .modelJson(archive.getModelJson()) <del> .trainingJson(archive.getTrainingJson()) <del>// .weights(archive.getWeights()) <add> .modelHdf5Filename(modelHdf5Filename) <ide> .enforceTrainingConfig(false) <ide> .buildModel(); <ide> ComputationGraph model = kerasModel.getComputationGraph(); <ide> */ <ide> public static ComputationGraph importKerasModelAndWeights(String modelJsonFilename, String weightsHdf5Filename) <ide> throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { <del> KerasModelImport archive = new KerasModelImport(modelJsonFilename, weightsHdf5Filename); <del> if (!archive.getModelClassName().equals(MODEL_CLASS_NAME_MODEL)) <del> throw new InvalidKerasConfigurationException("Expected Keras model class name Model (found " + archive.getModelClassName() + ")"); <ide> KerasModel kerasModel = new KerasModel.ModelBuilder() <del> .modelJson(archive.getModelJson()) <del>// .weights(archive.getWeights()) <add> 
.modelJsonFilename(modelJsonFilename) <add> .weightsHdf5Filename(weightsHdf5Filename) <ide> .enforceTrainingConfig(false) <ide> .buildModel(); <ide> ComputationGraph model = kerasModel.getComputationGraph(); <ide> */ <ide> public static ComputationGraphConfiguration importKerasModelConfiguration(String modelJsonFilename) <ide> throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { <del> String modelJson = new String(Files.readAllBytes(Paths.get(modelJsonFilename))); <ide> KerasModel kerasModel = new KerasModel.ModelBuilder() <del> .modelJson(modelJson) <add> .modelJsonFilename(modelJsonFilename) <ide> .enforceTrainingConfig(false) <ide> .buildModel(); <ide> return kerasModel.getComputationGraphConfiguration(); <ide> */ <ide> public static MultiLayerConfiguration importKerasSequentialConfiguration(String modelJsonFilename) <ide> throws IOException, InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { <del> String modelJson = new String(Files.readAllBytes(Paths.get(modelJsonFilename))); <ide> KerasSequentialModel kerasModel = new KerasSequentialModel.ModelBuilder() <del> .modelJson(modelJson) <add> .modelJsonFilename(modelJsonFilename) <ide> .enforceTrainingConfig(false) <ide> .buildSequential(); <ide> return kerasModel.getMultiLayerConfiguration();
Java
apache-2.0
6a5eddcbcbfc8343f358613861797b4303322f67
0
jclouds/legacy-jclouds-karaf
/** * Licensed to jclouds, Inc. (jclouds) under one or more * contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. jclouds licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jclouds.karaf.commands.blobstore; import java.io.File; import java.io.InputStream; import org.apache.felix.gogo.commands.Argument; import org.apache.felix.gogo.commands.Command; import org.apache.felix.gogo.commands.Option; import org.jclouds.blobstore.BlobStore; import com.google.common.base.Charsets; import com.google.common.base.Strings; import com.google.common.io.CharStreams; import com.google.common.io.Files; import com.google.common.io.InputSupplier; /** * @author: iocanel */ @Command(scope = "jclouds", name = "blobstore-read", description = "Reads data from the blobstore") public class BlobReadCommand extends BlobStoreCommandWithOptions { @Argument(index = 0, name = "containerName", description = "The name of the container", required = true, multiValued = false) String containerName; @Argument(index = 1, name = "blobName", description = "The name of the blob", required = true, multiValued = false) String blobName; @Option(name = "-f", aliases = "--to-file", description = "The file to store the blob", required = false, multiValued = false) String file; @Option(name = "-d", aliases = "--display", description = "Display the content to the console", required = false, multiValued = false) 
boolean display; @Override protected Object doExecute() throws Exception { BlobStore blobStore = null; try { blobStore = getBlobStore(); } catch (Throwable t) { System.err.println(t.getMessage()); return null; } InputSupplier<InputStream> supplier = getBlobInputStream(blobStore, containerName, blobName); if (!Strings.isNullOrEmpty(file)) { File f = new File(file); if (!f.exists() && f.createNewFile()) { Files.copy(supplier, f); } } if (display) { CharStreams.copy(CharStreams.newReaderSupplier(supplier, Charsets.UTF_8), System.err); } return null; } }
commands/src/main/java/org/jclouds/karaf/commands/blobstore/BlobReadCommand.java
/** * Licensed to jclouds, Inc. (jclouds) under one or more * contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. jclouds licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jclouds.karaf.commands.blobstore; import java.io.File; import java.io.FileOutputStream; import java.io.InputStream; import org.apache.felix.gogo.commands.Argument; import org.apache.felix.gogo.commands.Command; import org.apache.felix.gogo.commands.Option; import org.jclouds.blobstore.BlobStore; import com.google.common.base.Strings; import com.google.common.io.ByteStreams; import com.google.common.io.Files; import com.google.common.io.InputSupplier; /** * @author: iocanel */ @Command(scope = "jclouds", name = "blobstore-read", description = "Reads data from the blobstore") public class BlobReadCommand extends BlobStoreCommandWithOptions { @Argument(index = 0, name = "containerName", description = "The name of the container", required = true, multiValued = false) String containerName; @Argument(index = 1, name = "blobName", description = "The name of the blob", required = true, multiValued = false) String blobName; @Option(name = "-f", aliases = "--to-file", description = "The file to store the blob", required = false, multiValued = false) String file; @Option(name = "-d", aliases = "--display", description = "Display the content to the console", required = false, multiValued = false) boolean 
display; @Override protected Object doExecute() throws Exception { BlobStore blobStore = null; try { blobStore = getBlobStore(); } catch (Throwable t) { System.err.println(t.getMessage()); return null; } InputSupplier<InputStream> supplier = getBlobInputStream(blobStore, containerName, blobName); if (!Strings.isNullOrEmpty(file)) { File f = new File(file); if (!f.exists() && f.createNewFile()) { Files.copy(supplier, f); } } if (display) { System.err.println(new String(ByteStreams.toByteArray(supplier))); } return null; } }
Stream display output
commands/src/main/java/org/jclouds/karaf/commands/blobstore/BlobReadCommand.java
Stream display output
<ide><path>ommands/src/main/java/org/jclouds/karaf/commands/blobstore/BlobReadCommand.java <ide> package org.jclouds.karaf.commands.blobstore; <ide> <ide> import java.io.File; <del>import java.io.FileOutputStream; <ide> import java.io.InputStream; <ide> <ide> import org.apache.felix.gogo.commands.Argument; <ide> import org.apache.felix.gogo.commands.Option; <ide> import org.jclouds.blobstore.BlobStore; <ide> <add>import com.google.common.base.Charsets; <ide> import com.google.common.base.Strings; <del>import com.google.common.io.ByteStreams; <add>import com.google.common.io.CharStreams; <ide> import com.google.common.io.Files; <ide> import com.google.common.io.InputSupplier; <ide> <ide> } <ide> <ide> if (display) { <del> System.err.println(new String(ByteStreams.toByteArray(supplier))); <add> CharStreams.copy(CharStreams.newReaderSupplier(supplier, Charsets.UTF_8), System.err); <ide> } <ide> return null; <ide> }
Java
epl-1.0
759fc09398bd1a0af3c4cd8868262be951bb97be
0
gnodet/wikitext
/******************************************************************************* * Copyright (c) 2004 - 2006 University Of British Columbia and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * University Of British Columbia - initial API and implementation *******************************************************************************/ package org.eclipse.mylar.internal.tasklist.ui.wizards; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.wizard.Wizard; import org.eclipse.mylar.provisional.tasklist.AbstractRepositoryConnector; import org.eclipse.mylar.provisional.tasklist.MylarTaskListPlugin; import org.eclipse.mylar.provisional.tasklist.TaskRepository; import org.eclipse.ui.INewWizard; import org.eclipse.ui.IWorkbench; /** * @author Mik Kersten */ public class EditRepositoryWizard extends Wizard implements INewWizard { private AbstractRepositorySettingsPage abstractRepositorySettingsPage;// = private TaskRepository oldRepository; public EditRepositoryWizard(TaskRepository repository) { super(); oldRepository = repository; // super.setForcePreviousAndNextButtons(true); AbstractRepositoryConnector connector = MylarTaskListPlugin.getRepositoryManager().getRepositoryConnector( repository.getKind()); abstractRepositorySettingsPage = connector.getSettingsPage(); abstractRepositorySettingsPage.setRepository(repository); abstractRepositorySettingsPage.setVersion(repository.getVersion()); abstractRepositorySettingsPage.setWizard(this); } @Override public boolean performFinish() { if (canFinish()) { MylarTaskListPlugin.getRepositoryManager().removeRepository(oldRepository); TaskRepository repository = new TaskRepository(abstractRepositorySettingsPage.getRepository().getKind(), abstractRepositorySettingsPage.getServerUrl(), 
abstractRepositorySettingsPage.getVersion()); repository.setAuthenticationCredentials(abstractRepositorySettingsPage.getUserName(), abstractRepositorySettingsPage.getPassword()); MylarTaskListPlugin.getRepositoryManager().addRepository(repository); return true; } return false; } public void init(IWorkbench workbench, IStructuredSelection selection) { } @Override public void addPages() { addPage(abstractRepositorySettingsPage); } @Override public boolean canFinish() { return abstractRepositorySettingsPage.isPageComplete(); } }
org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/internal/tasklist/ui/wizards/EditRepositoryWizard.java
/******************************************************************************* * Copyright (c) 2004 - 2006 University Of British Columbia and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * University Of British Columbia - initial API and implementation *******************************************************************************/ package org.eclipse.mylar.internal.tasklist.ui.wizards; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.wizard.Wizard; import org.eclipse.mylar.provisional.tasklist.AbstractRepositoryConnector; import org.eclipse.mylar.provisional.tasklist.MylarTaskListPlugin; import org.eclipse.mylar.provisional.tasklist.TaskRepository; import org.eclipse.ui.INewWizard; import org.eclipse.ui.IWorkbench; /** * @author Mik Kersten */ public class EditRepositoryWizard extends Wizard implements INewWizard { private AbstractRepositorySettingsPage abstractRepositorySettingsPage;// = private TaskRepository oldRepository; public EditRepositoryWizard(TaskRepository repository) { super(); oldRepository = repository; // super.setForcePreviousAndNextButtons(true); AbstractRepositoryConnector connector = MylarTaskListPlugin.getRepositoryManager().getRepositoryConnector( repository.getKind()); abstractRepositorySettingsPage = connector.getSettingsPage(); abstractRepositorySettingsPage.setRepository(repository); abstractRepositorySettingsPage.setVersion(repository.getVersion()); abstractRepositorySettingsPage.setWizard(this); } @Override public boolean performFinish() { if (canFinish()) { TaskRepository repository = new TaskRepository(abstractRepositorySettingsPage.getRepository().getKind(), abstractRepositorySettingsPage.getServerUrl(), abstractRepositorySettingsPage.getVersion()); 
repository.setAuthenticationCredentials(abstractRepositorySettingsPage.getUserName(), abstractRepositorySettingsPage.getPassword()); MylarTaskListPlugin.getRepositoryManager().removeRepository(oldRepository); MylarTaskListPlugin.getRepositoryManager().addRepository(repository); return true; } return false; } public void init(IWorkbench workbench, IStructuredSelection selection) { } @Override public void addPages() { addPage(abstractRepositorySettingsPage); } @Override public boolean canFinish() { return abstractRepositorySettingsPage.isPageComplete(); } }
Completed: 125173: add support for bugzilla 2.22 https://bugs.eclipse.org/bugs/show_bug.cgi?id=125173
org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/internal/tasklist/ui/wizards/EditRepositoryWizard.java
Completed: 125173: add support for bugzilla 2.22 https://bugs.eclipse.org/bugs/show_bug.cgi?id=125173
<ide><path>rg.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/internal/tasklist/ui/wizards/EditRepositoryWizard.java <ide> @Override <ide> public boolean performFinish() { <ide> if (canFinish()) { <add> MylarTaskListPlugin.getRepositoryManager().removeRepository(oldRepository); <ide> TaskRepository repository = new TaskRepository(abstractRepositorySettingsPage.getRepository().getKind(), <ide> abstractRepositorySettingsPage.getServerUrl(), abstractRepositorySettingsPage.getVersion()); <ide> repository.setAuthenticationCredentials(abstractRepositorySettingsPage.getUserName(), <del> abstractRepositorySettingsPage.getPassword()); <del> MylarTaskListPlugin.getRepositoryManager().removeRepository(oldRepository); <add> abstractRepositorySettingsPage.getPassword()); <ide> MylarTaskListPlugin.getRepositoryManager().addRepository(repository); <ide> return true; <ide> }
Java
apache-2.0
f4088cda280df9eedd90fc4625389bc9149e2e84
0
kelemen/JTrim,kelemen/JTrim
package org.jtrim.image.transform; import java.util.ArrayList; import java.util.List; import org.jtrim.concurrent.TaskExecutorService; import org.jtrim.concurrent.async.*; /** * Defines an {@link AsyncDataQuery} which transforms its input image on the * specified {@link TaskExecutorService}. There can be multiple transformations * defined, each defining a more and more accurate transformation. That is, * {@code ImageTransformerLink} applies each transformation to its input and * forwards each transformed image assuming that the last transformation applied * is the most accurate one. * <P> * Note that essentially each transformation defines the same transformation, * they should only differ in the accuracy of their result. * <P> * This class is effectively a factory for {@link ImageTransformerLink} * instances. That is, the {@link #createDataLink(ImageTransformerData) createDataLink} * method creates an {@code ImageTransformerLink} with the arguments specified * at construction time. * * <h3>Thread safety</h3> * The methods of this class are safe to be accessed by multiple threads * concurrently. * * <h4>Synchronization transparency</h4> * The methods of this class are not <I>synchronization transparent</I>. * * @see org.jtrim.swing.component.AsyncImageDisplay * @see ImageTransformerLink */ public final class ImageTransformerQuery implements AsyncDataQuery<ImageTransformerData, TransformedImageData> { private final List<AsyncDataConverter<ImageTransformerData, TransformedImageData>> imageTransformers; /** * Creates a new {@code ImageTransformerQuery} which will execute the image * transformations on the specified executor. * * @param executor the {@code TaskExecutorService} on which the image * transformers are called. Each image transformation is called in a * separate task submitted to this executor. This argument cannot be * {@code null}. * @param imageTransformers the transformations to be applied to the input * image. 
This argument cannot be {@code null} and cannot contain * {@code null} elements. * * @throws NullPointerException throw if the specified executor or the image * transformation array or any of its transformation is {@code null} */ public ImageTransformerQuery(TaskExecutorService executor, ImageTransformer... imageTransformers) { List<AsyncDataConverter<ImageTransformerData, TransformedImageData>> taskList; taskList = new ArrayList<>(imageTransformers.length); for (ImageTransformer transformer: imageTransformers) { DataConverter<ImageTransformerData, TransformedImageData> converter; converter = new ImageConverter(transformer); taskList.add(new AsyncDataConverter<>(converter, executor)); } this.imageTransformers = taskList; } /** * Creates a new {@code ImageTransformerQuery} applying the specified image * transformations on input images. The transformations are executed on * the associated {@code TaskExecutorService}. * * @param imageTransformers the transformations (with their associated * {@code TaskExecutorService}) to be applied to the input image. This * argument cannot be {@code null} and cannot contain {@code null} * elements. 
* * @throws NullPointerException thrown if the specified image transformation * list is {@code null} or contains {@code null} elements */ public ImageTransformerQuery( List<AsyncDataConverter<ImageTransformerData, TransformedImage>> imageTransformers) { this.imageTransformers = new ArrayList<>(imageTransformers.size()); for (AsyncDataConverter<ImageTransformerData, TransformedImage> converter: imageTransformers) { this.imageTransformers.add(new AsyncDataConverter<>( new ImageConverter(converter.getConverter()), converter.getExecutor() )); } } /** * {@inheritDoc } */ @Override public ImageTransformerLink createDataLink(ImageTransformerData arg) { return ImageTransformerLink.createFromDataTransformers( arg, imageTransformers); } /** * Returns the string representation of this {@code AsyncDataQuery} in no * particular format * <P> * This method is intended to be used for debugging only. * * @return the string representation of this object in no particular format. * This method never returns {@code null}. */ @Override public String toString() { String imageTransformersStr = AsyncFormatHelper.collectionToString(imageTransformers); StringBuilder result = new StringBuilder(); result.append("Tranform images gradually using "); AsyncFormatHelper.appendIndented(imageTransformersStr, result); return result.toString(); } }
jtrim-gui/src/main/java/org/jtrim/image/transform/ImageTransformerQuery.java
package org.jtrim.image.transform; import java.util.ArrayList; import java.util.List; import org.jtrim.concurrent.TaskExecutorService; import org.jtrim.concurrent.async.*; public final class ImageTransformerQuery implements AsyncDataQuery<ImageTransformerData, TransformedImageData> { private final List<AsyncDataConverter<ImageTransformerData, TransformedImageData>> imageTransformers; public ImageTransformerQuery(TaskExecutorService executor, ImageTransformer... imageTransformers) { List<AsyncDataConverter<ImageTransformerData, TransformedImageData>> taskList; taskList = new ArrayList<>(imageTransformers.length); for (ImageTransformer transformer: imageTransformers) { DataConverter<ImageTransformerData, TransformedImageData> converter; converter = new ImageConverter(transformer); taskList.add(new AsyncDataConverter<>(converter, executor)); } this.imageTransformers = taskList; } public ImageTransformerQuery( List<AsyncDataConverter<ImageTransformerData, TransformedImage>> imageTransformers) { this.imageTransformers = new ArrayList<>(imageTransformers.size()); for (AsyncDataConverter<ImageTransformerData, TransformedImage> converter: imageTransformers) { this.imageTransformers.add(new AsyncDataConverter<>( new ImageConverter(converter.getConverter()), converter.getExecutor() )); } } @Override public AsyncDataLink<TransformedImageData> createDataLink(ImageTransformerData arg) { return ImageTransformerLink.createFromDataTransformers( arg, imageTransformers); } @Override public String toString() { String imageTransformersStr = AsyncFormatHelper.collectionToString(imageTransformers); StringBuilder result = new StringBuilder(); result.append("Tranform images gradually using "); AsyncFormatHelper.appendIndented(imageTransformersStr, result); return result.toString(); } }
Documented ImageTransformerQuery
jtrim-gui/src/main/java/org/jtrim/image/transform/ImageTransformerQuery.java
Documented ImageTransformerQuery
<ide><path>trim-gui/src/main/java/org/jtrim/image/transform/ImageTransformerQuery.java <ide> import org.jtrim.concurrent.TaskExecutorService; <ide> import org.jtrim.concurrent.async.*; <ide> <add>/** <add> * Defines an {@link AsyncDataQuery} which transforms its input image on the <add> * specified {@link TaskExecutorService}. There can be multiple transformations <add> * defined, each defining a more and more accurate transformation. That is, <add> * {@code ImageTransformerLink} applies each transformation to its input and <add> * forwards each transformed image assuming that the last transformation applied <add> * is the most accurate one. <add> * <P> <add> * Note that essentially each transformation defines the same transformation, <add> * they should only differ in the accuracy of their result. <add> * <P> <add> * This class is effectively a factory for {@link ImageTransformerLink} <add> * instances. That is, the {@link #createDataLink(ImageTransformerData) createDataLink} <add> * method creates an {@code ImageTransformerLink} with the arguments specified <add> * at construction time. <add> * <add> * <h3>Thread safety</h3> <add> * The methods of this class are safe to be accessed by multiple threads <add> * concurrently. <add> * <add> * <h4>Synchronization transparency</h4> <add> * The methods of this class are not <I>synchronization transparent</I>. <add> * <add> * @see org.jtrim.swing.component.AsyncImageDisplay <add> * @see ImageTransformerLink <add> */ <ide> public final class ImageTransformerQuery <ide> implements <ide> AsyncDataQuery<ImageTransformerData, TransformedImageData> { <ide> <ide> private final List<AsyncDataConverter<ImageTransformerData, TransformedImageData>> imageTransformers; <ide> <add> /** <add> * Creates a new {@code ImageTransformerQuery} which will execute the image <add> * transformations on the specified executor. <add> * <add> * @param executor the {@code TaskExecutorService} on which the image <add> * transformers are called. 
Each image transformation is called in a <add> * separate task submitted to this executor. This argument cannot be <add> * {@code null}. <add> * @param imageTransformers the transformations to be applied to the input <add> * image. This argument cannot be {@code null} and cannot contain <add> * {@code null} elements. <add> * <add> * @throws NullPointerException throw if the specified executor or the image <add> * transformation array or any of its transformation is {@code null} <add> */ <ide> public ImageTransformerQuery(TaskExecutorService executor, ImageTransformer... imageTransformers) { <ide> List<AsyncDataConverter<ImageTransformerData, TransformedImageData>> taskList; <ide> taskList = new ArrayList<>(imageTransformers.length); <ide> this.imageTransformers = taskList; <ide> } <ide> <add> /** <add> * Creates a new {@code ImageTransformerQuery} applying the specified image <add> * transformations on input images. The transformations are executed on <add> * the associated {@code TaskExecutorService}. <add> * <add> * @param imageTransformers the transformations (with their associated <add> * {@code TaskExecutorService}) to be applied to the input image. This <add> * argument cannot be {@code null} and cannot contain {@code null} <add> * elements. 
<add> * <add> * @throws NullPointerException thrown if the specified image transformation <add> * list is {@code null} or contains {@code null} elements <add> */ <ide> public ImageTransformerQuery( <ide> List<AsyncDataConverter<ImageTransformerData, TransformedImage>> imageTransformers) { <ide> <ide> } <ide> } <ide> <add> /** <add> * {@inheritDoc } <add> */ <ide> @Override <del> public AsyncDataLink<TransformedImageData> createDataLink(ImageTransformerData arg) { <add> public ImageTransformerLink createDataLink(ImageTransformerData arg) { <ide> return ImageTransformerLink.createFromDataTransformers( <ide> arg, imageTransformers); <ide> } <ide> <add> /** <add> * Returns the string representation of this {@code AsyncDataQuery} in no <add> * particular format <add> * <P> <add> * This method is intended to be used for debugging only. <add> * <add> * @return the string representation of this object in no particular format. <add> * This method never returns {@code null}. <add> */ <ide> @Override <ide> public String toString() { <ide> String imageTransformersStr